diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index fcf34e569555..5cf566de0f9a 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -37,9 +37,13 @@ docs/bigquery_datatransfer/ @googleapis/api-bigquery bigquery_storage/ @googleapis/api-bigquery docs/bigquery_storage/ @googleapis/api-bigquery +# Data Catalog isn't technically part of BigQuery, but it's closely related. +datacatalog/ @googleapis/api-bigquery +docs/datacatalog/ @googleapis/api-bigquery + # Pubsub -pubsub/ @anguillanneuf @plamut -docs/pubsub @anguillanneuf @plamut +pubsub/ @anguillanneuf @plamut @pradn +docs/pubsub @anguillanneuf @plamut @pradn # Tim Swast is the primary author of Runtime Config. diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 000000000000..d0bd7b92b3c7 --- /dev/null +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,7 @@ +Thank you for opening a Pull Request! Before submitting your PR, there are a few things you can do to make sure it goes smoothly: +- [ ] Make sure to open an issue as a [bug/issue](https://github.com/googleapis/google-cloud-python/issues) before writing your code! That way we can discuss the change, evaluate designs, and agree on the general idea +- [ ] Ensure the tests and linter pass +- [ ] Code coverage does not decrease (if any source code was changed) +- [ ] Appropriate docs were updated (if necessary) + +Fixes # 🦕 \ No newline at end of file diff --git a/.kokoro/continuous/cloudbuild.cfg b/.kokoro/continuous/cloudbuild.cfg new file mode 100644 index 000000000000..695ad3787290 --- /dev/null +++ b/.kokoro/continuous/cloudbuild.cfg @@ -0,0 +1,7 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Tell the trampoline which build file to use. 
+env_vars: { + key: "PACKAGE" + value: "cloudbuild" +} diff --git a/.kokoro/continuous/documentai.cfg b/.kokoro/continuous/documentai.cfg new file mode 100644 index 000000000000..c374e377c2cf --- /dev/null +++ b/.kokoro/continuous/documentai.cfg @@ -0,0 +1,7 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Tell the trampoline which build file to use. +env_vars: { + key: "PACKAGE" + value: "documentai" +} diff --git a/.kokoro/continuous/recommender.cfg b/.kokoro/continuous/recommender.cfg new file mode 100644 index 000000000000..7d65909e6343 --- /dev/null +++ b/.kokoro/continuous/recommender.cfg @@ -0,0 +1,7 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Tell the trampoline which build file to use. +env_vars: { + key: "PACKAGE" + value: "recommender" +} diff --git a/.kokoro/docs/cloudbuild.cfg b/.kokoro/docs/cloudbuild.cfg new file mode 100644 index 000000000000..695ad3787290 --- /dev/null +++ b/.kokoro/docs/cloudbuild.cfg @@ -0,0 +1,7 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Tell the trampoline which build file to use. +env_vars: { + key: "PACKAGE" + value: "cloudbuild" +} diff --git a/.kokoro/docs/documentai.cfg b/.kokoro/docs/documentai.cfg new file mode 100644 index 000000000000..c374e377c2cf --- /dev/null +++ b/.kokoro/docs/documentai.cfg @@ -0,0 +1,7 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Tell the trampoline which build file to use. +env_vars: { + key: "PACKAGE" + value: "documentai" +} diff --git a/.kokoro/docs/recommender.cfg b/.kokoro/docs/recommender.cfg new file mode 100644 index 000000000000..7d65909e6343 --- /dev/null +++ b/.kokoro/docs/recommender.cfg @@ -0,0 +1,7 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Tell the trampoline which build file to use. 
+env_vars: { + key: "PACKAGE" + value: "recommender" +} diff --git a/.kokoro/presubmit/cloudbuild.cfg b/.kokoro/presubmit/cloudbuild.cfg new file mode 100644 index 000000000000..695ad3787290 --- /dev/null +++ b/.kokoro/presubmit/cloudbuild.cfg @@ -0,0 +1,7 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Tell the trampoline which build file to use. +env_vars: { + key: "PACKAGE" + value: "cloudbuild" +} diff --git a/.kokoro/presubmit/documentai.cfg b/.kokoro/presubmit/documentai.cfg new file mode 100644 index 000000000000..c374e377c2cf --- /dev/null +++ b/.kokoro/presubmit/documentai.cfg @@ -0,0 +1,7 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Tell the trampoline which build file to use. +env_vars: { + key: "PACKAGE" + value: "documentai" +} diff --git a/.kokoro/presubmit/recommender.cfg b/.kokoro/presubmit/recommender.cfg new file mode 100644 index 000000000000..7d65909e6343 --- /dev/null +++ b/.kokoro/presubmit/recommender.cfg @@ -0,0 +1,7 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Tell the trampoline which build file to use. +env_vars: { + key: "PACKAGE" + value: "recommender" +} diff --git a/.kokoro/release/cloudbuild.cfg b/.kokoro/release/cloudbuild.cfg new file mode 100644 index 000000000000..695ad3787290 --- /dev/null +++ b/.kokoro/release/cloudbuild.cfg @@ -0,0 +1,7 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Tell the trampoline which build file to use. +env_vars: { + key: "PACKAGE" + value: "cloudbuild" +} diff --git a/.kokoro/release/documentai.cfg b/.kokoro/release/documentai.cfg new file mode 100644 index 000000000000..c374e377c2cf --- /dev/null +++ b/.kokoro/release/documentai.cfg @@ -0,0 +1,7 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Tell the trampoline which build file to use. 
+env_vars: { + key: "PACKAGE" + value: "documentai" +} diff --git a/.kokoro/release/recommender.cfg b/.kokoro/release/recommender.cfg new file mode 100644 index 000000000000..7d65909e6343 --- /dev/null +++ b/.kokoro/release/recommender.cfg @@ -0,0 +1,7 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Tell the trampoline which build file to use. +env_vars: { + key: "PACKAGE" + value: "recommender" +} diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 3ffb6031096a..f606d71ec2e8 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -49,7 +49,7 @@ You'll have to create a development environment to hack on $ cd hack-on-google-cloud-python # Configure remotes such that you can pull changes from the google-cloud-python # repository into your local repository. - $ git remote add upstream git@github.com:GoogleCloudPlatform/google-cloud-python.git + $ git remote add upstream git@github.com:googleapis/google-cloud-python.git # fetch and merge changes from upstream into master $ git fetch upstream $ git merge upstream/master @@ -60,23 +60,25 @@ repo, from which you can submit a pull request. To work on the codebase and run the tests, we recommend using ``nox``, but you can also use a ``virtualenv`` of your own creation. -.. _repo: https://github.com/GoogleCloudPlatform/google-cloud-python +.. _repo: https://github.com/googleapis/google-cloud-python Using ``nox`` ============= We use `nox `__ to instrument our tests. -- To test your changes, run unit tests with ``nox``:: +You must install nox using Python 3. - $ nox -f datastore/noxfile.py -s unit-2.7 - $ nox -f datastore/noxfile.py -s unit-3.7 +- To test your changes, go to the package directory and run ``nox``:: + + $ nox -s unit-2.7 + $ nox -s unit-3.7 $ ... .. 
note:: The unit tests and system tests are contained in the individual - ``nox.py`` files in each directory; substitute ``datastore`` in the + ``noxfile.py`` files in each directory; substitute ``datastore`` in the example above with the package of your choice. @@ -85,7 +87,7 @@ We use `nox `__ to instrument our tests. $ export GIT_ROOT=$(pwd) $ cd ${GIT_ROOT}/datastore/ - $ nox -s "unit(py='3.7')" + $ nox -s unit-3.7 .. nox: https://pypi.org/project/nox-automation/ @@ -122,9 +124,13 @@ On Debian/Ubuntu:: Coding Style ************ -- PEP8 compliance, with exceptions defined in the linter configuration. - If you have ``nox`` installed, you can test that you have not introduced - any non-compliant code via:: +- We use the automatic code formatter ``black``. You can run it using + the nox session ``blacken``. This will eliminate many lint errors. Run via:: + + $ nox -s blacken + +- PEP8 compliance is required, with exceptions defined in the linter configuration. + You can test for non-compliant code via:: $ nox -s lint @@ -135,8 +141,8 @@ Coding Style export GOOGLE_CLOUD_TESTING_BRANCH="master" By doing this, you are specifying the location of the most up-to-date - version of ``google-cloud-python``. The the suggested remote name ``upstream`` - should point to the official ``GoogleCloudPlatform`` checkout and the + version of ``google-cloud-python``. The suggested remote name ``upstream`` + should point to the official ``googleapis`` checkout and the the branch should be the main branch on that remote (``master``). Exceptions to PEP8: @@ -149,10 +155,11 @@ Exceptions to PEP8: Running System Tests ******************** -- To run system tests for a given package, you can execute:: +- To run system tests for a given package, go to the package directory + and execute:: - $ nox -f datastore/noxfile.py -s system-3.7 - $ nox -f datastore/noxfile.py -s system-2.7 + $ nox -s system-3.7 + $ nox -s system-2.7 .. 
note:: @@ -216,9 +223,10 @@ Running System Tests Running Generated Sample Tests ****************************** -- To run system tests for a given package, you can execute:: +- To run system tests for a given package, go to the package directory + and execute:: - $ nox -f speech/noxfile.py -s samples + $ nox -s samples .. note:: @@ -246,33 +254,13 @@ documentation in this package which references that API or behavior must be changed to reflect the bug fix, ideally in the same commit that fixes the bug or adds the feature. -To build and review docs (where ``${VENV}`` refers to the virtualenv you're -using to develop ``google-cloud-python``): - -#. After following the steps above in "Using a Development Checkout", install - Sphinx and all development requirements in your virtualenv:: - - $ cd ${HOME}/hack-on-google-cloud-python - $ ${VENV}/bin/pip install Sphinx +To build and review docs go to the package directory and execute:: -#. Change into the ``docs`` directory within your ``google-cloud-python`` checkout and - execute the ``make`` command with some flags:: - - $ cd ${HOME}/hack-on-google-cloud-python/google-cloud-python/docs - $ make clean html SPHINXBUILD=${VENV}/bin/sphinx-build - - The ``SPHINXBUILD=...`` argument tells Sphinx to use the virtualenv Python, - which will have both Sphinx and ``google-cloud-python`` (for API documentation - generation) installed. +   $ nox -s docs #. Open the ``docs/_build/html/index.html`` file to see the resulting HTML rendering. -As an alternative to 1. and 2. above, if you have ``nox`` installed, you -can build the docs via:: - - $ nox -s docs ******************************************** Note About ``README`` as it pertains to PyPI ******************************************** @@ -287,13 +275,10 @@ may cause problems creating links or rendering the description. .. 
_description on PyPI: https://pypi.org/project/google-cloud/ ********************** -CircleCI Configuration +Kokoro Configuration ********************** -All build scripts in the ``.circleci/config.yml`` configuration file which have -Python dependencies are specified in the ``nox.py`` configuration. -They are executed in the Travis build via ``nox -s ${ENV}`` where -``${ENV}`` is the environment being tested. +Build scripts and configurations are in the ``.kokoro`` directory. ************************* diff --git a/README.rst b/README.rst index fc2d936c0271..7188ea5a74c1 100644 --- a/README.rst +++ b/README.rst @@ -28,46 +28,71 @@ priority. The following client libraries have **GA** support: -- `Google BigQuery`_ (`BigQuery README`_) -- `Google Cloud Bigtable`_ (`Bigtable README`_) -- `Google Cloud Datastore`_ (`Datastore README`_) -- `Google Cloud KMS`_ (`KMS README`_) -- `Google Cloud Natural Language`_ (`Natural Language README`_) -- `Google Cloud Pub/Sub`_ (`Pub/Sub README`_) -- `Google Cloud Scheduler`_ (`Scheduler README`_) -- `Google Cloud Spanner`_ (`Spanner README`_) -- `Google Cloud Speech`_ (`Speech README`_) -- `Google Cloud Storage`_ (`Storage README`_) -- `Google Cloud Tasks`_ (`Tasks README`_) -- `Google Cloud Translation`_ (`Translation README`_) -- `Stackdriver Logging`_ (`Logging README`_) +- `Google BigQuery`_ (`BigQuery README`_, `BigQuery Documentation`_) +- `Google Cloud Bigtable`_ (`Bigtable README`_, `Bigtable Documentation`_) +- `Google Cloud Datastore`_ (`Datastore README`_, `Datastore Documentation`_) +- `Google Cloud KMS`_ (`KMS README`_, `KMS Documentation`_) +- `Google Cloud Natural Language`_ (`Natural Language README`_, `Natural Language Documentation`_) +- `Google Cloud Pub/Sub`_ (`Pub/Sub README`_, `Pub/Sub Documentation`_) +- `Google Cloud Scheduler`_ (`Scheduler README`_, `Scheduler Documentation`_) +- `Google Cloud Spanner`_ (`Spanner README`_, `Spanner Documentation`_) +- `Google Cloud Speech to Text`_ (`Speech to Text 
README`_, `Speech to Text Documentation`_) +- `Google Cloud Storage`_ (`Storage README`_, `Storage Documentation`_) +- `Google Cloud Tasks`_ (`Tasks README`_, `Tasks Documentation`_) +- `Google Cloud Translation`_ (`Translation README`_, `Translation Documentation`_) +- `Stackdriver Logging`_ (`Logging README`_, `Logging Documentation`_) .. _Google BigQuery: https://pypi.org/project/google-cloud-bigquery/ .. _BigQuery README: https://github.com/googleapis/google-cloud-python/tree/master/bigquery +.. _BigQuery Documentation: https://googleapis.dev/python/bigquery/latest + .. _Google Cloud Bigtable: https://pypi.org/project/google-cloud-bigtable/ .. _Bigtable README: https://github.com/googleapis/google-cloud-python/tree/master/bigtable +.. _Bigtable Documentation: https://googleapis.dev/python/bigtable/latest + .. _Google Cloud Datastore: https://pypi.org/project/google-cloud-datastore/ .. _Datastore README: https://github.com/googleapis/google-cloud-python/tree/master/datastore +.. _Datastore Documentation: https://googleapis.dev/python/datastore/latest + .. _Google Cloud KMS: https://pypi.org/project/google-cloud-kms/ .. _KMS README: https://github.com/googleapis/google-cloud-python/tree/master/kms +.. _KMS Documentation: https://googleapis.dev/python/cloudkms/latest + .. _Google Cloud Natural Language: https://pypi.org/project/google-cloud-language/ .. _Natural Language README: https://github.com/googleapis/google-cloud-python/tree/master/language +.. _Natural Language Documentation: https://googleapis.dev/python/language/latest + .. _Google Cloud Pub/Sub: https://pypi.org/project/google-cloud-pubsub/ .. _Pub/Sub README: https://github.com/googleapis/google-cloud-python/tree/master/pubsub +.. _Pub/Sub Documentation: https://googleapis.dev/python/pubsub/latest + .. _Google Cloud Spanner: https://pypi.org/project/google-cloud-spanner .. _Spanner README: https://github.com/googleapis/google-cloud-python/tree/master/spanner -.. 
_Google Cloud Speech: https://pypi.org/project/google-cloud-speech/ -.. _Speech README: https://github.com/googleapis/google-cloud-python/tree/master/speech +.. _Spanner Documentation: https://googleapis.dev/python/spanner/latest + +.. _Google Cloud Speech to Text: https://pypi.org/project/google-cloud-speech/ +.. _Speech to Text README: https://github.com/googleapis/google-cloud-python/tree/master/speech +.. _Speech to Text Documentation: https://googleapis.dev/python/speech/latest + .. _Google Cloud Storage: https://pypi.org/project/google-cloud-storage/ .. _Storage README: https://github.com/googleapis/google-cloud-python/tree/master/storage +.. _Storage Documentation: https://googleapis.dev/python/storage/latest + .. _Google Cloud Tasks: https://pypi.org/project/google-cloud-tasks/ .. _Tasks README: https://github.com/googleapis/google-cloud-python/tree/master/tasks +.. _Tasks Documentation: https://googleapis.dev/python/cloudtasks/latest + .. _Google Cloud Translation: https://pypi.org/project/google-cloud-translate/ .. _Translation README: https://github.com/googleapis/google-cloud-python/tree/master/translate +.. _Translation Documentation: https://googleapis.dev/python/translation/latest + .. _Google Cloud Scheduler: https://pypi.org/project/google-cloud-scheduler/ .. _Scheduler README: https://github.com/googleapis/google-cloud-python/tree/master/scheduler +.. _Scheduler Documentation: https://googleapis.dev/python/cloudscheduler/latest + .. _Stackdriver Logging: https://pypi.org/project/google-cloud-logging/ .. _Logging README: https://github.com/googleapis/google-cloud-python/tree/master/logging +.. _Logging Documentation: https://googleapis.dev/python/logging/latest Beta Support ------------ @@ -78,16 +103,21 @@ against beta libraries are addressed with a higher priority. 
The following client libraries have **beta** support: -- `Google Cloud Firestore`_ (`Firestore README`_) -- `Google Cloud Video Intelligence`_ (`Video Intelligence README`_) -- `Google Cloud Vision`_ (`Vision README`_) +- `Google Cloud Firestore`_ (`Firestore README`_, `Firestore Documentation`_) +- `Google Cloud Video Intelligence`_ (`Video Intelligence README`_, `Video Intelligence Documentation`_) +- `Google Cloud Vision`_ (`Vision README`_, `Vision Documentation`_) .. _Google Cloud Firestore: https://pypi.org/project/google-cloud-firestore/ .. _Firestore README: https://github.com/googleapis/google-cloud-python/tree/master/firestore +.. _Firestore Documentation: https://googleapis.dev/python/firestore/latest + .. _Google Cloud Video Intelligence: https://pypi.org/project/google-cloud-videointelligence .. _Video Intelligence README: https://github.com/googleapis/google-cloud-python/tree/master/videointelligence +.. _Video Intelligence Documentation: https://googleapis.dev/python/videointelligence/latest + .. _Google Cloud Vision: https://pypi.org/project/google-cloud-vision/ .. _Vision README: https://github.com/googleapis/google-cloud-python/tree/master/vision +.. _Vision Documentation: https://googleapis.dev/python/vision/latest Alpha Support @@ -99,64 +129,111 @@ updates. See `versioning`_ for more details. 
The following client libraries have **alpha** support: -- `Google Cloud Asset`_ (`Asset README`_) -- `Google Cloud AutoML`_ (`AutoML README`_) -- `Google BigQuery Data Transfer`_ (`BigQuery Data Transfer README`_) -- `Google Cloud Bigtable - HappyBase`_ (`HappyBase README`_) -- `Google Cloud Container`_ (`Container README`_) -- `Google Cloud Container Analysis`_ (`Container Analysis README`_) -- `Google Cloud Dataproc`_ (`Dataproc README`_) -- `Google Cloud DLP`_ (`DLP README`_) -- `Google Cloud DNS`_ (`DNS README`_) -- `Google Cloud IoT`_ (`IoT README`_) -- `Google Cloud Memorystore for Redis`_ (`Redis README`_) -- `Google Cloud Resource Manager`_ (`Resource Manager README`_) -- `Google Cloud Runtime Configuration`_ (`Runtime Config README`_) -- `Google Cloud Security Scanner`_ (`Security Scanner README`_ ) -- `Google Cloud Trace`_ (`Trace README`_) -- `Google Cloud Text-to-Speech`_ (`Text-to-Speech README`_) -- `Grafeas`_ (`Grafeas README`_) -- `Stackdriver Error Reporting`_ (`Error Reporting README`_) -- `Stackdriver Monitoring`_ (`Monitoring README`_) +- `Google Cloud Asset`_ (`Asset README`_, `Asset Documentation`_) +- `Google Cloud AutoML`_ (`AutoML README`_, `AutoML Documentation`_) +- `Google BigQuery Data Transfer`_ (`BigQuery Data Transfer README`_, `BigQuery Documentation`_) +- `Google Cloud Bigtable - HappyBase`_ (`HappyBase README`_, `HappyBase Documentation`_) +- `Google Cloud Build`_ (`Cloud Build README`_, `Cloud Build Documentation`_) +- `Google Cloud Container`_ (`Container README`_, `Container Documentation`_) +- `Google Cloud Container Analysis`_ (`Container Analysis README`_, `Container Analysis Documentation`_) +- `Google Cloud Dataproc`_ (`Dataproc README`_, `Dataproc Documentation`_) +- `Google Cloud DLP`_ (`DLP README`_, `DLP Documentation`_) +- `Google Cloud DNS`_ (`DNS README`_, `DNS Documentation`_) +- `Google Cloud IoT`_ (`IoT README`_, `IoT Documentation`_) +- `Google Cloud Memorystore for Redis`_ (`Redis README`_, `Redis 
Documentation`_) +- `Google Cloud Recommender`_ (`Recommender README`_, `Recommender Documentation`_) +- `Google Cloud Resource Manager`_ (`Resource Manager README`_, `Resource Manager Documentation`_) +- `Google Cloud Runtime Configuration`_ (`Runtime Config README`_, `Runtime Config Documentation`_) +- `Google Cloud Security Scanner`_ (`Security Scanner README`_ , `Security Scanner Documentation`_) +- `Google Cloud Trace`_ (`Trace README`_, `Trace Documentation`_) +- `Google Cloud Text-to-Speech`_ (`Text-to-Speech README`_, `Text-to-Speech Documentation`_) +- `Grafeas`_ (`Grafeas README`_, `Grafeas Documentation`_) +- `Stackdriver Error Reporting`_ (`Error Reporting README`_, `Error Reporting Documentation`_) +- `Stackdriver Monitoring`_ (`Monitoring README`_, `Monitoring Documentation`_) .. _Google Cloud Asset: https://pypi.org/project/google-cloud-asset/ .. _Asset README: https://github.com/googleapis/google-cloud-python/blob/master/asset +.. _Asset Documentation: https://googleapis.dev/python/cloudasset/latest + .. _Google Cloud AutoML: https://pypi.org/project/google-cloud-automl/ .. _AutoML README: https://github.com/googleapis/google-cloud-python/blob/master/automl +.. _AutoML Documentation: https://googleapis.dev/python/automl/latest + .. _Google BigQuery Data Transfer: https://pypi.org/project/google-cloud-bigquery-datatransfer/ .. _BigQuery Data Transfer README: https://github.com/googleapis/google-cloud-python/tree/master/bigquery_datatransfer +.. _BigQuery Documentation: https://googleapis.dev/python/bigquery/latest + .. _Google Cloud Bigtable - HappyBase: https://pypi.org/project/google-cloud-happybase/ .. _HappyBase README: https://github.com/googleapis/google-cloud-python-happybase +.. _HappyBase Documentation: https://google-cloud-python-happybase.readthedocs.io/en/latest/ + +.. _Google Cloud Build: https://pypi.org/project/google-cloud-build/ +.. _Cloud Build README: https://github.com/googleapis/google-cloud-python/tree/master/cloudbuild +.. 
_Cloud Build Documentation: https://googleapis.dev/python/cloudbuild/latest + .. _Google Cloud Container: https://pypi.org/project/google-cloud-container/ .. _Container README: https://github.com/googleapis/google-cloud-python/tree/master/container +.. _Container Documentation: https://googleapis.dev/python/container/latest + .. _Google Cloud Container Analysis: https://pypi.org/project/google-cloud-containeranalysis/ .. _Container Analysis README: https://github.com/googleapis/google-cloud-python/tree/master/containeranalysis +.. _Container Analysis Documentation: https://googleapis.dev/python/containeranalysis/latest + .. _Google Cloud Dataproc: https://pypi.org/project/google-cloud-dataproc/ .. _Dataproc README: https://github.com/googleapis/google-cloud-python/tree/master/dataproc +.. _Dataproc Documentation: https://googleapis.dev/python/dataproc/latest + .. _Google Cloud DLP: https://pypi.org/project/google-cloud-dlp/ .. _DLP README: https://github.com/googleapis/google-cloud-python/tree/master/dlp +.. _DLP Documentation: https://googleapis.dev/python/dlp/latest + .. _Google Cloud DNS: https://pypi.org/project/google-cloud-dns/ .. _DNS README: https://github.com/googleapis/google-cloud-python/tree/master/dns +.. _DNS Documentation: https://googleapis.dev/python/dns/latest + .. _Google Cloud IoT: https://pypi.org/project/google-cloud-iot/ .. _IoT README: https://github.com/googleapis/google-cloud-python/tree/master/iot +.. _IoT Documentation: https://googleapis.dev/python/cloudiot/latest + .. _Google Cloud Memorystore for Redis: https://pypi.org/project/google-cloud-redis/ .. _Redis README: https://github.com/googleapis/google-cloud-python/tree/master/redis +.. _Redis Documentation: https://googleapis.dev/python/redis/latest + +.. _Google Cloud Recommender: https://pypi.org/project/google-cloud-recommender/ +.. _Recommender README: https://github.com/googleapis/google-cloud-python/tree/master/recommender +.. 
_Recommender Documentation: https://googleapis.dev/python/recommender/latest + .. _Google Cloud Resource Manager: https://pypi.org/project/google-cloud-resource-manager/ .. _Resource Manager README: https://github.com/googleapis/google-cloud-python/tree/master/resource_manager +.. _Resource Manager Documentation: https://googleapis.dev/python/cloudresourcemanager/latest + .. _Google Cloud Runtime Configuration: https://pypi.org/project/google-cloud-runtimeconfig/ .. _Runtime Config README: https://github.com/googleapis/google-cloud-python/tree/master/runtimeconfig +.. _Runtime Config Documentation: https://googleapis.dev/python/runtimeconfig/latest + .. _Google Cloud Security Scanner: https://pypi.org/project/google-cloud-websecurityscanner/ .. _Security Scanner README: https://github.com/googleapis/google-cloud-python/blob/master/websecurityscanner +.. _Security Scanner Documentation: https://googleapis.dev/python/websecurityscanner/latest + .. _Google Cloud Text-to-Speech: https://pypi.org/project/google-cloud-texttospeech/ .. _Text-to-Speech README: https://github.com/googleapis/google-cloud-python/tree/master/texttospeech +.. _Text-to-Speech Documentation: https://googleapis.dev/python/texttospeech/latest + .. _Google Cloud Trace: https://pypi.org/project/google-cloud-trace/ .. _Trace README: https://github.com/googleapis/google-cloud-python/tree/master/trace +.. _Trace Documentation: https://googleapis.dev/python/cloudtrace/latest + .. _Grafeas: https://pypi.org/project/grafeas/ .. _Grafeas README: https://github.com/googleapis/google-cloud-python/tree/master/grafeas +.. _Grafeas Documentation: https://googleapis.dev/python/grafeas/latest + .. _Stackdriver Error Reporting: https://pypi.org/project/google-cloud-error-reporting/ .. _Error Reporting README: https://github.com/googleapis/google-cloud-python/tree/master/error_reporting +.. _Error Reporting Documentation: https://googleapis.dev/python/clouderrorreporting/latest + .. 
_Stackdriver Monitoring: https://pypi.org/project/google-cloud-monitoring/ .. _Monitoring README: https://github.com/googleapis/google-cloud-python/tree/master/monitoring +.. _Monitoring Documentation: https://googleapis.dev/python/monitoring/latest .. _versioning: https://github.com/googleapis/google-cloud-python/blob/master/CONTRIBUTING.rst#versioning @@ -185,7 +262,7 @@ Check out the `Authentication section`_ in our documentation to learn more. You may also find the `authentication document`_ shared by all the ``google-cloud-*`` libraries to be helpful. -.. _Authentication section: https://google-cloud-python.readthedocs.io/en/latest/core/auth.html +.. _Authentication section: https://googleapis.dev/python/google-api-core/latest/auth.html .. _authentication document: https://github.com/googleapis/google-cloud-common/tree/master/authentication Contributing diff --git a/api_core/CHANGELOG.md b/api_core/CHANGELOG.md index 74ab16564dbc..1b1ec96d717b 100644 --- a/api_core/CHANGELOG.md +++ b/api_core/CHANGELOG.md @@ -4,6 +4,25 @@ [1]: https://pypi.org/project/google-api-core/#history +## 1.14.3 + +10-07-2019 10:35 PDT + + +### Implementation Changes +- Finalize during close of 'ResumableBidiRpc' ([#9337](https://github.com/googleapis/google-cloud-python/pull/9337)) +- add on_error to Retry.__init__ ([#8892](https://github.com/googleapis/google-cloud-python/pull/8892)) +- Fix race in 'BackgroundConsumer._thread_main'. ([#8883](https://github.com/googleapis/google-cloud-python/pull/8883)) + +### Documentation +- Fix intersphinx reference to requests ([#9294](https://github.com/googleapis/google-cloud-python/pull/9294)) +- Fix broken links in docs. ([#9148](https://github.com/googleapis/google-cloud-python/pull/9148)) +- About of time -> amount of time ([#9052](https://github.com/googleapis/google-cloud-python/pull/9052)) +- Remove compatability badges from READMEs. 
([#9035](https://github.com/googleapis/google-cloud-python/pull/9035)) + +### Internal / Testing Changes +- Remove CI for gh-pages, use googleapis.dev for api_core refs. ([#9085](https://github.com/googleapis/google-cloud-python/pull/9085)) + ## 1.14.2 07-30-2019 14:08 PDT diff --git a/api_core/docs/auth.rst b/api_core/docs/auth.rst index d0e84c65ecac..cec7c16ddf29 100644 --- a/api_core/docs/auth.rst +++ b/api_core/docs/auth.rst @@ -177,22 +177,6 @@ described above, so be sure none of the other possible environments conflict with your user provided credentials. -Advanced users of `oauth2client`_ can also use custom flows to -create credentials using `client secrets`_ or using a -`webserver flow`_. -After creation, :class:`Credentials ` -can be serialized with -:meth:`to_json() ` -and stored in a file and then and deserialized with -:meth:`from_json() `. In order -to use ``oauth2client``'s credentials with this library, you'll need to -`convert them`_. - -.. _oauth2client: https://github.com/Google/oauth2client -.. _client secrets: https://developers.google.com/api-client-library/python/guide/aaa_oauth#flow_from_clientsecrets -.. _webserver flow: https://developers.google.com/api-client-library/python/guide/aaa_oauth#OAuth2WebServerFlow -.. 
_convert them: http://google-auth.readthedocs.io/en/stable/user-guide.html#user-credentials - Troubleshooting =============== diff --git a/api_core/docs/conf.py b/api_core/docs/conf.py index 60b1f7a3f0b9..b92886d4d0fc 100644 --- a/api_core/docs/conf.py +++ b/api_core/docs/conf.py @@ -338,7 +338,7 @@ "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://2.python-requests.org/en/master/", None), + "requests": ("https://requests.kennethreitz.org/en/stable/", None), "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } diff --git a/api_core/google/api_core/bidi.py b/api_core/google/api_core/bidi.py index f73c7c9dfabc..b171a4112a31 100644 --- a/api_core/google/api_core/bidi.py +++ b/api_core/google/api_core/bidi.py @@ -561,6 +561,10 @@ def _recv(self): def recv(self): return self._recoverable(self._recv) + def close(self): + self._finalize(None) + super(ResumableBidiRpc, self).close() + @property def is_active(self): """bool: True if this stream is currently open and active.""" @@ -698,7 +702,11 @@ def stop(self): if self._thread is not None: # Resume the thread to wake it up in case it is sleeping. self.resume() - self._thread.join() + # The daemonized thread may itself block, so don't wait + # for it longer than a second. 
+ self._thread.join(1.0) + if self._thread.is_alive(): # pragma: NO COVER + _LOGGER.warning("Background thread did not exit.") self._thread = None diff --git a/api_core/google/api_core/datetime_helpers.py b/api_core/google/api_core/datetime_helpers.py index 84c1bb7f512c..d670a12bd3b7 100644 --- a/api_core/google/api_core/datetime_helpers.py +++ b/api_core/google/api_core/datetime_helpers.py @@ -17,6 +17,7 @@ import calendar import datetime import re +import warnings import pytz @@ -115,20 +116,10 @@ def from_iso8601_time(value): def from_rfc3339(value): - """Convert a microsecond-precision timestamp to datetime. - - Args: - value (str): The RFC3339 string to convert. - - Returns: - datetime.datetime: The datetime object equivalent to the timestamp in - UTC. - """ - return datetime.datetime.strptime(value, _RFC3339_MICROS).replace(tzinfo=pytz.utc) + """Convert an RFC3339-format timestamp to a native datetime. - -def from_rfc3339_nanos(value): - """Convert a nanosecond-precision timestamp to a native datetime. + Supported formats include those without fractional seconds, or with + any fraction up to nanosecond precision. .. note:: Python datetimes do not support nanosecond precision; this function @@ -138,12 +129,9 @@ def from_rfc3339_nanos(value): value (str): The RFC3339 string to convert. Returns: - datetime.datetime: The datetime object equivalent to the timestamp in - UTC. + datetime.datetime: The datetime object equivalent to the timestamp + in UTC. - Raises: - ValueError: If the timestamp does not match the RFC 3339 - regular expression. + """ with_nanos = _RFC3339_NANOS.match(value) @@ -169,6 +157,34 @@ def from_rfc3339_nanos(value): return bare_seconds.replace(microsecond=micros, tzinfo=pytz.utc) +def from_rfc3339_nanos(value): + """DEPRECATED. 
Convert a nanosecond-precision timestamp to a native datetime. + + .. note:: + Python datetimes do not support nanosecond precision; this + function therefore truncates such values to microseconds. + + Args: + value (str): The RFC3339 string to convert. + + Returns: + datetime.datetime: The datetime object equivalent to the + timestamp in UTC. + + Raises: + ValueError: If the timestamp does not match the RFC3339 + regular expression. + """ + # Raise deprecation warnings for things we want to go away. + warnings.warn( + "The `from_rfc3339_nanos` function is deprecated" + " use `from_rfc3339` instead.", + DeprecationWarning, + stacklevel=2, + ) + return from_rfc3339(value) + + def to_rfc3339(value, ignore_zone=True): """Convert a datetime to an RFC3339 timestamp string. @@ -215,22 +231,22 @@ def nanosecond(self): return self._nanosecond def rfc3339(self): - """Return an RFC 3339-compliant timestamp. + """Return an RFC3339-compliant timestamp. Returns: - (str): Timestamp string according to RFC 3339 spec. + (str): Timestamp string according to RFC3339 spec. """ if self._nanosecond == 0: return to_rfc3339(self) - nanos = str(self._nanosecond).rjust(9, '0').rstrip("0") + nanos = str(self._nanosecond).rjust(9, "0").rstrip("0") return "{}.{}Z".format(self.strftime(_RFC3339_NO_FRACTION), nanos) @classmethod def from_rfc3339(cls, stamp): - """Parse RFC 3339-compliant timestamp, preserving nanoseconds. + """Parse RFC3339-compliant timestamp, preserving nanoseconds. Args: - stamp (str): RFC 3339 stamp, with up to nanosecond precision + stamp (str): RFC3339 stamp, with up to nanosecond precision Returns: :class:`DatetimeWithNanoseconds`: @@ -280,7 +296,7 @@ def timestamp_pb(self): @classmethod def from_timestamp_pb(cls, stamp): - """Parse RFC 3339-compliant timestamp, preserving nanoseconds. 
Args: stamp (:class:`~google.protobuf.timestamp_pb2.Timestamp`): timestamp message diff --git a/api_core/google/api_core/page_iterator.py b/api_core/google/api_core/page_iterator.py index 3ac5904399b0..11a92d38f3ce 100644 --- a/api_core/google/api_core/page_iterator.py +++ b/api_core/google/api_core/page_iterator.py @@ -96,14 +96,22 @@ class Page(object): Callable to convert an item from the type in the raw API response into the native object. Will be called with the iterator and a single item. + raw_page Optional[google.protobuf.message.Message]: + The raw page response. """ - def __init__(self, parent, items, item_to_value): + def __init__(self, parent, items, item_to_value, raw_page=None): self._parent = parent self._num_items = len(items) self._remaining = self._num_items self._item_iter = iter(items) self._item_to_value = item_to_value + self._raw_page = raw_page + + @property + def raw_page(self): + """google.protobuf.message.Message""" + return self._raw_page @property def num_items(self): @@ -360,7 +368,7 @@ def _next_page(self): if self._has_next_page(): response = self._get_next_page_response() items = response.get(self._items_key, ()) - page = Page(self, items, self.item_to_value) + page = Page(self, items, self.item_to_value, raw_page=response) self._page_start(self, page, response) self.next_page_token = response.get(self._next_token) return page @@ -527,7 +535,7 @@ def _next_page(self): self.next_page_token = getattr(response, self._response_token_field) items = getattr(response, self._items_field) - page = Page(self, items, self.item_to_value) + page = Page(self, items, self.item_to_value, raw_page=response) return page diff --git a/api_core/setup.py b/api_core/setup.py index 25b7072d91f2..16802fef4623 100644 --- a/api_core/setup.py +++ b/api_core/setup.py @@ -22,7 +22,7 @@ name = "google-api-core" description = "Google API client core library" -version = "1.14.2" +version = "1.14.3" # Should be one of: # 'Development Status :: 3 - Alpha' # 
'Development Status :: 4 - Beta' diff --git a/api_core/tests/unit/test_bidi.py b/api_core/tests/unit/test_bidi.py index 4d185d3158e4..52215cbde22f 100644 --- a/api_core/tests/unit/test_bidi.py +++ b/api_core/tests/unit/test_bidi.py @@ -597,6 +597,31 @@ def test_recv_failure(self): assert bidi_rpc.is_active is False assert call.cancelled is True + def test_close(self): + call = mock.create_autospec(_CallAndFuture, instance=True) + + def cancel_side_effect(): + call.is_active.return_value = False + + call.cancel.side_effect = cancel_side_effect + start_rpc = mock.create_autospec( + grpc.StreamStreamMultiCallable, instance=True, return_value=call + ) + should_recover = mock.Mock(spec=["__call__"], return_value=False) + bidi_rpc = bidi.ResumableBidiRpc(start_rpc, should_recover) + bidi_rpc.open() + + bidi_rpc.close() + + should_recover.assert_not_called() + call.cancel.assert_called_once() + assert bidi_rpc.call == call + assert bidi_rpc.is_active is False + # ensure the request queue was signaled to stop. + assert bidi_rpc.pending_requests == 1 + assert bidi_rpc._request_queue.get() is None + assert bidi_rpc._finalized + def test_reopen_failure_on_rpc_restart(self): error1 = ValueError("1") error2 = ValueError("2") diff --git a/api_core/tests/unit/test_datetime_helpers.py b/api_core/tests/unit/test_datetime_helpers.py index 4d138c88a80c..948723f96ead 100644 --- a/api_core/tests/unit/test_datetime_helpers.py +++ b/api_core/tests/unit/test_datetime_helpers.py @@ -15,6 +15,7 @@ import calendar import datetime +import mock import pytest import pytz @@ -23,6 +24,9 @@ ONE_MINUTE_IN_MICROSECONDS = 60 * 1e6 +MESSAGE = ( + "The `from_rfc3339_nanos` function is deprecated" " use `from_rfc3339` instead." 
+) def test_utcnow(): @@ -82,18 +86,18 @@ def test_from_rfc3339(): ) -def test_from_rfc3339_with_bad_tz(): - value = "2009-12-17T12:44:32.123456BAD" - - with pytest.raises(ValueError): - datetime_helpers.from_rfc3339(value) - +def test_from_rfc3339_nanos(): + value = "2009-12-17T12:44:32.123456Z" + assert datetime_helpers.from_rfc3339_nanos(value) == datetime.datetime( + 2009, 12, 17, 12, 44, 32, 123456, pytz.utc + ) -def test_from_rfc3339_with_nanos(): - value = "2009-12-17T12:44:32.123456789Z" - with pytest.raises(ValueError): - datetime_helpers.from_rfc3339(value) +def test_from_rfc3339_without_nanos(): + value = "2009-12-17T12:44:32Z" + assert datetime_helpers.from_rfc3339(value) == datetime.datetime( + 2009, 12, 17, 12, 44, 32, 0, pytz.utc + ) def test_from_rfc3339_nanos_without_nanos(): @@ -103,11 +107,37 @@ def test_from_rfc3339_nanos_without_nanos(): ) -def test_from_rfc3339_nanos_with_bad_tz(): - value = "2009-12-17T12:44:32.123456789BAD" +@pytest.mark.parametrize( + "truncated, micros", + [ + ("12345678", 123456), + ("1234567", 123456), + ("123456", 123456), + ("12345", 123450), + ("1234", 123400), + ("123", 123000), + ("12", 120000), + ("1", 100000), + ], +) +def test_from_rfc3339_with_truncated_nanos(truncated, micros): + value = "2009-12-17T12:44:32.{}Z".format(truncated) + assert datetime_helpers.from_rfc3339(value) == datetime.datetime( + 2009, 12, 17, 12, 44, 32, micros, pytz.utc + ) - with pytest.raises(ValueError): - datetime_helpers.from_rfc3339_nanos(value) + +def test_from_rfc3339_nanos_is_deprecated(): + from_rfc3339_patch = mock.patch("google.api_core.datetime_helpers.from_rfc3339") + warnings_patch = mock.patch("warnings.warn") + value = "2009-12-17T12:44:32.123456Z" + + with from_rfc3339_patch as from_rfc3339, warnings_patch as warn: + result = datetime_helpers.from_rfc3339_nanos(value) + + assert result is from_rfc3339.return_value + from_rfc3339.assert_called_once_with(value) + warn.assert_called_once_with(MESSAGE, DeprecationWarning, 
stacklevel=2) @pytest.mark.parametrize( @@ -130,6 +160,18 @@ def test_from_rfc3339_nanos_with_truncated_nanos(truncated, micros): ) +def test_from_rfc3339_wo_nanos_raise_exception(): + value = "2009-12-17T12:44:32" + with pytest.raises(ValueError): + datetime_helpers.from_rfc3339(value) + + +def test_from_rfc3339_w_nanos_raise_exception(): + value = "2009-12-17T12:44:32.123456" + with pytest.raises(ValueError): + datetime_helpers.from_rfc3339(value) + + def test_to_rfc3339(): value = datetime.datetime(2016, 4, 5, 13, 30, 0) expected = "2016-04-05T13:30:00.000000Z" @@ -157,10 +199,11 @@ def test_to_rfc3339_with_non_utc_ignore_zone(): class Test_DateTimeWithNanos(object): - @staticmethod def test_ctor_wo_nanos(): - stamp = datetime_helpers.DatetimeWithNanoseconds(2016, 12, 20, 21, 13, 47, 123456) + stamp = datetime_helpers.DatetimeWithNanoseconds( + 2016, 12, 20, 21, 13, 47, 123456 + ) assert stamp.year == 2016 assert stamp.month == 12 assert stamp.day == 20 @@ -200,7 +243,9 @@ def test_ctor_w_micros_keyword_and_nanos(): @staticmethod def test_rfc3339_wo_nanos(): - stamp = datetime_helpers.DatetimeWithNanoseconds(2016, 12, 20, 21, 13, 47, 123456) + stamp = datetime_helpers.DatetimeWithNanoseconds( + 2016, 12, 20, 21, 13, 47, 123456 + ) assert stamp.rfc3339() == "2016-12-20T21:13:47.123456Z" @staticmethod @@ -285,12 +330,16 @@ def test_from_rfc3339_w_full_precision(): ) def test_from_rfc3339_test_nanoseconds(fractional, nanos): value = "2009-12-17T12:44:32.{}Z".format(fractional) - assert datetime_helpers.DatetimeWithNanoseconds.from_rfc3339(value).nanosecond == nanos + assert ( + datetime_helpers.DatetimeWithNanoseconds.from_rfc3339(value).nanosecond + == nanos + ) @staticmethod def test_timestamp_pb_wo_nanos_naive(): stamp = datetime_helpers.DatetimeWithNanoseconds( - 2016, 12, 20, 21, 13, 47, 123456) + 2016, 12, 20, 21, 13, 47, 123456 + ) delta = stamp.replace(tzinfo=pytz.UTC) - datetime_helpers._UTC_EPOCH seconds = int(delta.total_seconds()) nanos = 123456000 @@ 
-304,7 +353,8 @@ def test_timestamp_pb_w_nanos(): ) delta = stamp - datetime_helpers._UTC_EPOCH timestamp = timestamp_pb2.Timestamp( - seconds=int(delta.total_seconds()), nanos=123456789) + seconds=int(delta.total_seconds()), nanos=123456789 + ) assert stamp.timestamp_pb() == timestamp @staticmethod @@ -314,8 +364,7 @@ def test_from_timestamp_pb_wo_nanos(): seconds = int(delta.total_seconds()) timestamp = timestamp_pb2.Timestamp(seconds=seconds) - stamp = datetime_helpers.DatetimeWithNanoseconds.from_timestamp_pb( - timestamp) + stamp = datetime_helpers.DatetimeWithNanoseconds.from_timestamp_pb(timestamp) assert _to_seconds(when) == _to_seconds(stamp) assert stamp.microsecond == 0 @@ -329,8 +378,7 @@ def test_from_timestamp_pb_w_nanos(): seconds = int(delta.total_seconds()) timestamp = timestamp_pb2.Timestamp(seconds=seconds, nanos=123456789) - stamp = datetime_helpers.DatetimeWithNanoseconds.from_timestamp_pb( - timestamp) + stamp = datetime_helpers.DatetimeWithNanoseconds.from_timestamp_pb(timestamp) assert _to_seconds(when) == _to_seconds(stamp) assert stamp.microsecond == 123456 diff --git a/api_core/tests/unit/test_page_iterator.py b/api_core/tests/unit/test_page_iterator.py index 6335001bcf41..2bf742492889 100644 --- a/api_core/tests/unit/test_page_iterator.py +++ b/api_core/tests/unit/test_page_iterator.py @@ -36,9 +36,10 @@ def test_constructor(self): assert page.remaining == 3 assert page._parent is parent assert page._item_to_value is item_to_value + assert page.raw_page is None def test___iter__(self): - page = page_iterator.Page(None, (), None) + page = page_iterator.Page(None, (), None, None) assert iter(page) is page def test_iterator_calls_parent_item_to_value(self): @@ -69,6 +70,18 @@ def test_iterator_calls_parent_item_to_value(self): item_to_value.assert_called_with(parent, 12) assert page.remaining == 97 + def test_raw_page(self): + parent = mock.sentinel.parent + item_to_value = mock.sentinel.item_to_value + + raw_page = mock.sentinel.raw_page + 
+ page = page_iterator.Page(parent, (1, 2, 3), item_to_value, raw_page=raw_page) + assert page.raw_page is raw_page + + with pytest.raises(AttributeError): + page.raw_page = None + class PageIteratorImpl(page_iterator.Iterator): def _next_page(self): @@ -116,8 +129,7 @@ def test_pages_property_restart(self): def test__page_iter_increment(self): iterator = PageIteratorImpl(None, None) page = page_iterator.Page( - iterator, ("item",), page_iterator._item_to_value_identity - ) + iterator, ("item",), page_iterator._item_to_value_identity) iterator._next_page = mock.Mock(side_effect=[page, None]) assert iterator.num_results == 0 @@ -147,11 +159,9 @@ def test__items_iter(self): # Make pages from mock responses parent = mock.sentinel.parent page1 = page_iterator.Page( - parent, (item1, item2), page_iterator._item_to_value_identity - ) + parent, (item1, item2), page_iterator._item_to_value_identity) page2 = page_iterator.Page( - parent, (item3,), page_iterator._item_to_value_identity - ) + parent, (item3,), page_iterator._item_to_value_identity) iterator = PageIteratorImpl(None, None) iterator._next_page = mock.Mock(side_effect=[page1, page2, None]) diff --git a/asset/CHANGELOG.md b/asset/CHANGELOG.md index 049d0f2c3017..3f362e050225 100644 --- a/asset/CHANGELOG.md +++ b/asset/CHANGELOG.md @@ -4,6 +4,20 @@ [1]: https://pypi.org/project/google-cloud-asset/#history +## 0.5.0 + +10-29-2019 14:26 PDT + +### New Features +- Add `bigquery_destination` to `OutputConfig`; make `content_type` optional argument to `BatchGetAssetsHistoryRequest`; add `uri_prefix` to `GcsDestination`; add `ORG_POLICY` and `ACCESS_POLICY` content type enums ([#9555](https://github.com/googleapis/google-cloud-python/pull/9555)) + +### Documentation +- Remove compatability badges from READMEs. 
([#9035](https://github.com/googleapis/google-cloud-python/pull/9035)) + +### Internal / Testing Changes +- Fix intersphinx reference to requests ([#9294](https://github.com/googleapis/google-cloud-python/pull/9294)) +- Remove CI for gh-pages; use googleapis.dev for api_core refs ([#9085](https://github.com/googleapis/google-cloud-python/pull/9085)) + ## 0.4.1 08-12-2019 13:44 PDT diff --git a/asset/docs/conf.py b/asset/docs/conf.py index cb7ef3bdb856..462c1ad9623f 100644 --- a/asset/docs/conf.py +++ b/asset/docs/conf.py @@ -332,7 +332,7 @@ "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://2.python-requests.org/en/master/", None), + "requests": ("https://requests.kennethreitz.org/en/stable/", None), "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } diff --git a/asset/google/cloud/asset_v1/gapic/asset_service_client.py b/asset/google/cloud/asset_v1/gapic/asset_service_client.py index ab9078cc62c7..e4b3c18bc799 100644 --- a/asset/google/cloud/asset_v1/gapic/asset_service_client.py +++ b/asset/google/cloud/asset_v1/gapic/asset_service_client.py @@ -254,7 +254,7 @@ def export_assets( asset_types (list[str]): A list of asset types of which to take a snapshot for. For example: "compute.googleapis.com/Disk". If specified, only matching assets will be returned. See `Introduction to Cloud Asset - Inventory `__ + Inventory `__ for all supported asset types. content_type (~google.cloud.asset_v1.types.ContentType): Asset content type. If not specified, no content but the asset name will be returned. @@ -357,7 +357,7 @@ def batch_get_assets_history( parent (str): Required. The relative name of the root asset. 
It can only be an organization number (such as "organizations/123"), a project ID (such as "projects/my-project-id")", or a project number (such as "projects/12345"). - content_type (~google.cloud.asset_v1.types.ContentType): Required. The content type. + content_type (~google.cloud.asset_v1.types.ContentType): Optional. The content type. read_time_window (Union[dict, ~google.cloud.asset_v1.types.TimeWindow]): Optional. The time window for the asset history. Both start\_time and end\_time are optional and if set, it must be after 2018-10-02 UTC. If end\_time is not set, it is default to current timestamp. If start\_time @@ -372,7 +372,7 @@ def batch_get_assets_history( See `Resource Names `__ and `Resource Name - Format `__ + Format `__ for more info. The request becomes a no-op if the asset name list is empty, and the max diff --git a/asset/google/cloud/asset_v1/gapic/enums.py b/asset/google/cloud/asset_v1/gapic/enums.py index 780beae4ddef..38eb45ae0103 100644 --- a/asset/google/cloud/asset_v1/gapic/enums.py +++ b/asset/google/cloud/asset_v1/gapic/enums.py @@ -27,11 +27,15 @@ class ContentType(enum.IntEnum): CONTENT_TYPE_UNSPECIFIED (int): Unspecified content type. RESOURCE (int): Resource metadata. IAM_POLICY (int): The actual IAM policy set on a resource. + ORG_POLICY (int): The Cloud Organization Policy set on an asset. + ACCESS_POLICY (int): The Cloud Access context mananger Policy set on an asset. 
""" CONTENT_TYPE_UNSPECIFIED = 0 RESOURCE = 1 IAM_POLICY = 2 + ORG_POLICY = 4 + ACCESS_POLICY = 5 class NullValue(enum.IntEnum): diff --git a/asset/google/cloud/asset_v1/proto/asset_service.proto b/asset/google/cloud/asset_v1/proto/asset_service.proto index 0dfc2898b131..33dde9cd365b 100644 --- a/asset/google/cloud/asset_v1/proto/asset_service.proto +++ b/asset/google/cloud/asset_v1/proto/asset_service.proto @@ -18,8 +18,13 @@ syntax = "proto3"; package google.cloud.asset.v1; import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; import "google/cloud/asset/v1/assets.proto"; import "google/longrunning/operations.proto"; +import "google/protobuf/empty.proto"; +import "google/protobuf/field_mask.proto"; import "google/protobuf/timestamp.proto"; option csharp_namespace = "Google.Cloud.Asset.V1"; @@ -29,9 +34,11 @@ option java_outer_classname = "AssetServiceProto"; option java_package = "com.google.cloud.asset.v1"; option php_namespace = "Google\\Cloud\\Asset\\V1"; - // Asset service definition. service AssetService { + option (google.api.default_host) = "cloudasset.googleapis.com"; + option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; + // Exports assets with time and resource types to a given Cloud Storage // location. The output format is newline-delimited JSON. // This API implements the [google.longrunning.Operation][google.longrunning.Operation] API allowing you @@ -41,6 +48,10 @@ service AssetService { post: "/v1/{parent=*/*}:exportAssets" body: "*" }; + option (google.longrunning.operation_info) = { + response_type: "google.cloud.asset.v1.ExportAssetsResponse" + metadata_type: "google.cloud.asset.v1.ExportAssetsRequest" + }; } // Batch gets the update history of assets that overlap a time window. 
@@ -63,7 +74,12 @@ message ExportAssetsRequest { // organization number (such as "organizations/123"), a project ID (such as // "projects/my-project-id"), or a project number (such as "projects/12345"), // or a folder number (such as "folders/123"). - string parent = 1; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "cloudasset.googleapis.com/Asset" + } + ]; // Timestamp to take an asset snapshot. This can only be set to a timestamp // between 2018-10-02 UTC (inclusive) and the current time. If not specified, @@ -73,9 +89,9 @@ message ExportAssetsRequest { google.protobuf.Timestamp read_time = 2; // A list of asset types of which to take a snapshot for. For example: - // "compute.googleapis.com/Disk". If specified, only matching assets will be returned. - // See [Introduction to Cloud Asset - // Inventory](https://cloud.google.com/resource-manager/docs/cloud-asset-inventory/overview) + // "compute.googleapis.com/Disk". If specified, only matching assets will be + // returned. See [Introduction to Cloud Asset + // Inventory](https://cloud.google.com/asset-inventory/docs/overview) // for all supported asset types. repeated string asset_types = 3; @@ -85,7 +101,7 @@ message ExportAssetsRequest { // Required. Output configuration indicating where the results will be output // to. All results will be in newline delimited JSON format. - OutputConfig output_config = 5; + OutputConfig output_config = 5 [(google.api.field_behavior) = REQUIRED]; } // The export asset response. This message is returned by the @@ -105,21 +121,27 @@ message BatchGetAssetsHistoryRequest { // Required. The relative name of the root asset. It can only be an // organization number (such as "organizations/123"), a project ID (such as // "projects/my-project-id")", or a project number (such as "projects/12345"). 
- string parent = 1; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "cloudasset.googleapis.com/Asset" + } + ]; // A list of the full names of the assets. For example: // `//compute.googleapis.com/projects/my_project_123/zones/zone1/instances/instance1`. // See [Resource // Names](https://cloud.google.com/apis/design/resource_names#full_resource_name) - // and [Resource Name Format](https://cloud.google.com/resource-manager/docs/cloud-asset-inventory/resource-name-format) + // and [Resource Name + // Format](https://cloud.google.com/asset-inventory/docs/resource-name-format) // for more info. // // The request becomes a no-op if the asset name list is empty, and the max // size of the asset name list is 100 in one request. repeated string asset_names = 2; - // Required. The content type. - ContentType content_type = 3; + // Optional. The content type. + ContentType content_type = 3 [(google.api.field_behavior) = OPTIONAL]; // Optional. The time window for the asset history. Both start_time and // end_time are optional and if set, it must be after 2018-10-02 UTC. If @@ -127,7 +149,7 @@ message BatchGetAssetsHistoryRequest { // not set, the snapshot of the assets at end_time will be returned. The // returned results contain all temporal assets whose time window overlap with // read_time_window. - TimeWindow read_time_window = 4; + TimeWindow read_time_window = 4 [(google.api.field_behavior) = OPTIONAL]; } // Batch get assets history response. @@ -142,6 +164,12 @@ message OutputConfig { oneof destination { // Destination on Cloud Storage. GcsDestination gcs_destination = 1; + + // Destination on BigQuery. The output table stores the fields in asset + // proto as columns in BigQuery. The resource/iam_policy field is converted + // to a record with each field to a column, except metadata to a single JSON + // string. 
+ BigQueryDestination bigquery_destination = 2; } } @@ -155,9 +183,40 @@ message GcsDestination { // Metadata](https://cloud.google.com/storage/docs/viewing-editing-metadata) // for more information. string uri = 1; + + // The uri prefix of all generated Cloud Storage objects. For example: + // "gs://bucket_name/object_name_prefix". Each object uri is in format: + // "gs://bucket_name/object_name_prefix// and only + // contains assets for that type. starts from 0. For example: + // "gs://bucket_name/object_name_prefix/compute.googleapis.com/Disk/0" is + // the first shard of output objects containing all + // compute.googleapis.com/Disk assets. An INVALID_ARGUMENT error will be + // returned if file with the same name "gs://bucket_name/object_name_prefix" + // already exists. + string uri_prefix = 2; } } +// A BigQuery destination. +message BigQueryDestination { + // Required. The BigQuery dataset in format + // "projects/projectId/datasets/datasetId", to which the snapshot result + // should be exported. If this dataset does not exist, the export call returns + // an error. + string dataset = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The BigQuery table to which the snapshot result should be + // written. If this table does not exist, a new table with the given name + // will be created. + string table = 2 [(google.api.field_behavior) = REQUIRED]; + + // If the destination table already exists and this flag is `TRUE`, the + // table will be overwritten by the contents of assets snapshot. If the flag + // is not set and the destination table already exists, the export call + // returns an error. + bool force = 3; +} + // Asset content type. enum ContentType { // Unspecified content type. @@ -168,4 +227,10 @@ enum ContentType { // The actual IAM policy set on a resource. IAM_POLICY = 2; + + // The Cloud Organization Policy set on an asset. + ORG_POLICY = 4; + + // The Cloud Access context mananger Policy set on an asset. 
+ ACCESS_POLICY = 5; } diff --git a/asset/google/cloud/asset_v1/proto/asset_service_pb2.py b/asset/google/cloud/asset_v1/proto/asset_service_pb2.py index 869d4dc3fee1..b382992b04c8 100644 --- a/asset/google/cloud/asset_v1/proto/asset_service_pb2.py +++ b/asset/google/cloud/asset_v1/proto/asset_service_pb2.py @@ -17,12 +17,17 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.cloud.asset_v1.proto import ( assets_pb2 as google_dot_cloud_dot_asset__v1_dot_proto_dot_assets__pb2, ) from google.longrunning import ( operations_pb2 as google_dot_longrunning_dot_operations__pb2, ) +from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 +from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 @@ -34,12 +39,17 @@ "\n\031com.google.cloud.asset.v1B\021AssetServiceProtoP\001Z:google.golang.org/genproto/googleapis/cloud/asset/v1;asset\252\002\025Google.Cloud.Asset.V1\312\002\025Google\\Cloud\\Asset\\V1" ), serialized_pb=_b( - '\n/google/cloud/asset_v1/proto/asset_service.proto\x12\x15google.cloud.asset.v1\x1a\x1cgoogle/api/annotations.proto\x1a(google/cloud/asset_v1/proto/assets.proto\x1a#google/longrunning/operations.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xdf\x01\n\x13\x45xportAssetsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12-\n\tread_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x13\n\x0b\x61sset_types\x18\x03 \x03(\t\x12\x38\n\x0c\x63ontent_type\x18\x04 \x01(\x0e\x32".google.cloud.asset.v1.ContentType\x12:\n\routput_config\x18\x05 \x01(\x0b\x32#.google.cloud.asset.v1.OutputConfig"\x81\x01\n\x14\x45xportAssetsResponse\x12-\n\tread_time\x18\x01 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12:\n\routput_config\x18\x02 \x01(\x0b\x32#.google.cloud.asset.v1.OutputConfig"\xba\x01\n\x1c\x42\x61tchGetAssetsHistoryRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x13\n\x0b\x61sset_names\x18\x02 \x03(\t\x12\x38\n\x0c\x63ontent_type\x18\x03 \x01(\x0e\x32".google.cloud.asset.v1.ContentType\x12;\n\x10read_time_window\x18\x04 \x01(\x0b\x32!.google.cloud.asset.v1.TimeWindow"U\n\x1d\x42\x61tchGetAssetsHistoryResponse\x12\x34\n\x06\x61ssets\x18\x01 \x03(\x0b\x32$.google.cloud.asset.v1.TemporalAsset"_\n\x0cOutputConfig\x12@\n\x0fgcs_destination\x18\x01 \x01(\x0b\x32%.google.cloud.asset.v1.GcsDestinationH\x00\x42\r\n\x0b\x64\x65stination"-\n\x0eGcsDestination\x12\r\n\x03uri\x18\x01 \x01(\tH\x00\x42\x0c\n\nobject_uri*I\n\x0b\x43ontentType\x12\x1c\n\x18\x43ONTENT_TYPE_UNSPECIFIED\x10\x00\x12\x0c\n\x08RESOURCE\x10\x01\x12\x0e\n\nIAM_POLICY\x10\x02\x32\xc9\x02\n\x0c\x41ssetService\x12\x83\x01\n\x0c\x45xportAssets\x12*.google.cloud.asset.v1.ExportAssetsRequest\x1a\x1d.google.longrunning.Operation"(\x82\xd3\xe4\x93\x02""\x1d/v1/{parent=*/*}:exportAssets:\x01*\x12\xb2\x01\n\x15\x42\x61tchGetAssetsHistory\x12\x33.google.cloud.asset.v1.BatchGetAssetsHistoryRequest\x1a\x34.google.cloud.asset.v1.BatchGetAssetsHistoryResponse".\x82\xd3\xe4\x93\x02(\x12&/v1/{parent=*/*}:batchGetAssetsHistoryB\x9c\x01\n\x19\x63om.google.cloud.asset.v1B\x11\x41ssetServiceProtoP\x01Z:google.golang.org/genproto/googleapis/cloud/asset/v1;asset\xaa\x02\x15Google.Cloud.Asset.V1\xca\x02\x15Google\\Cloud\\Asset\\V1b\x06proto3' + '\n/google/cloud/asset_v1/proto/asset_service.proto\x12\x15google.cloud.asset.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a(google/cloud/asset_v1/proto/assets.proto\x1a#google/longrunning/operations.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a 
google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\x8d\x02\n\x13\x45xportAssetsRequest\x12\x37\n\x06parent\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\x12\x1f\x63loudasset.googleapis.com/Asset\x12-\n\tread_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x13\n\x0b\x61sset_types\x18\x03 \x03(\t\x12\x38\n\x0c\x63ontent_type\x18\x04 \x01(\x0e\x32".google.cloud.asset.v1.ContentType\x12?\n\routput_config\x18\x05 \x01(\x0b\x32#.google.cloud.asset.v1.OutputConfigB\x03\xe0\x41\x02"\x81\x01\n\x14\x45xportAssetsResponse\x12-\n\tread_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12:\n\routput_config\x18\x02 \x01(\x0b\x32#.google.cloud.asset.v1.OutputConfig"\xed\x01\n\x1c\x42\x61tchGetAssetsHistoryRequest\x12\x37\n\x06parent\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\x12\x1f\x63loudasset.googleapis.com/Asset\x12\x13\n\x0b\x61sset_names\x18\x02 \x03(\t\x12=\n\x0c\x63ontent_type\x18\x03 \x01(\x0e\x32".google.cloud.asset.v1.ContentTypeB\x03\xe0\x41\x01\x12@\n\x10read_time_window\x18\x04 \x01(\x0b\x32!.google.cloud.asset.v1.TimeWindowB\x03\xe0\x41\x01"U\n\x1d\x42\x61tchGetAssetsHistoryResponse\x12\x34\n\x06\x61ssets\x18\x01 \x03(\x0b\x32$.google.cloud.asset.v1.TemporalAsset"\xab\x01\n\x0cOutputConfig\x12@\n\x0fgcs_destination\x18\x01 \x01(\x0b\x32%.google.cloud.asset.v1.GcsDestinationH\x00\x12J\n\x14\x62igquery_destination\x18\x02 \x01(\x0b\x32*.google.cloud.asset.v1.BigQueryDestinationH\x00\x42\r\n\x0b\x64\x65stination"C\n\x0eGcsDestination\x12\r\n\x03uri\x18\x01 \x01(\tH\x00\x12\x14\n\nuri_prefix\x18\x02 \x01(\tH\x00\x42\x0c\n\nobject_uri"N\n\x13\x42igQueryDestination\x12\x14\n\x07\x64\x61taset\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x12\n\x05table\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\r\n\x05\x66orce\x18\x03 
\x01(\x08*l\n\x0b\x43ontentType\x12\x1c\n\x18\x43ONTENT_TYPE_UNSPECIFIED\x10\x00\x12\x0c\n\x08RESOURCE\x10\x01\x12\x0e\n\nIAM_POLICY\x10\x02\x12\x0e\n\nORG_POLICY\x10\x04\x12\x11\n\rACCESS_POLICY\x10\x05\x32\xf3\x03\n\x0c\x41ssetService\x12\xde\x01\n\x0c\x45xportAssets\x12*.google.cloud.asset.v1.ExportAssetsRequest\x1a\x1d.google.longrunning.Operation"\x82\x01\x82\xd3\xe4\x93\x02""\x1d/v1/{parent=*/*}:exportAssets:\x01*\xca\x41W\n*google.cloud.asset.v1.ExportAssetsResponse\x12)google.cloud.asset.v1.ExportAssetsRequest\x12\xb2\x01\n\x15\x42\x61tchGetAssetsHistory\x12\x33.google.cloud.asset.v1.BatchGetAssetsHistoryRequest\x1a\x34.google.cloud.asset.v1.BatchGetAssetsHistoryResponse".\x82\xd3\xe4\x93\x02(\x12&/v1/{parent=*/*}:batchGetAssetsHistory\x1aM\xca\x41\x19\x63loudasset.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB\x9c\x01\n\x19\x63om.google.cloud.asset.v1B\x11\x41ssetServiceProtoP\x01Z:google.golang.org/genproto/googleapis/cloud/asset/v1;asset\xaa\x02\x15Google.Cloud.Asset.V1\xca\x02\x15Google\\Cloud\\Asset\\V1b\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, + google_dot_api_dot_resource__pb2.DESCRIPTOR, google_dot_cloud_dot_asset__v1_dot_proto_dot_assets__pb2.DESCRIPTOR, google_dot_longrunning_dot_operations__pb2.DESCRIPTOR, + google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, + google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, ], ) @@ -63,11 +73,17 @@ _descriptor.EnumValueDescriptor( name="IAM_POLICY", index=2, number=2, serialized_options=None, type=None ), + _descriptor.EnumValueDescriptor( + name="ORG_POLICY", index=3, number=4, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="ACCESS_POLICY", index=4, number=5, serialized_options=None, type=None + ), ], containing_type=None, serialized_options=None, - serialized_start=994, 
- serialized_end=1067, + serialized_start=1418, + serialized_end=1526, ) _sym_db.RegisterEnumDescriptor(_CONTENTTYPE) @@ -75,6 +91,8 @@ CONTENT_TYPE_UNSPECIFIED = 0 RESOURCE = 1 IAM_POLICY = 2 +ORG_POLICY = 4 +ACCESS_POLICY = 5 _EXPORTASSETSREQUEST = _descriptor.Descriptor( @@ -99,7 +117,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A!\022\037cloudasset.googleapis.com/Asset" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -171,7 +191,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -183,8 +203,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=217, - serialized_end=440, + serialized_start=365, + serialized_end=634, ) @@ -240,8 +260,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=443, - serialized_end=572, + serialized_start=637, + serialized_end=766, ) @@ -267,7 +287,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A!\022\037cloudasset.googleapis.com/Asset" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -303,7 +325,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -321,7 +343,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -333,8 +355,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=575, - serialized_end=761, + serialized_start=769, + serialized_end=1006, ) @@ -372,8 +394,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=763, - serialized_end=848, + serialized_start=1008, + serialized_end=1093, ) @@ -401,7 +423,25 @@ 
extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), + _descriptor.FieldDescriptor( + name="bigquery_destination", + full_name="google.cloud.asset.v1.OutputConfig.bigquery_destination", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), ], extensions=[], nested_types=[], @@ -419,8 +459,8 @@ fields=[], ) ], - serialized_start=850, - serialized_end=945, + serialized_start=1096, + serialized_end=1267, ) @@ -448,7 +488,25 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), + _descriptor.FieldDescriptor( + name="uri_prefix", + full_name="google.cloud.asset.v1.GcsDestination.uri_prefix", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), ], extensions=[], nested_types=[], @@ -466,8 +524,83 @@ fields=[], ) ], - serialized_start=947, - serialized_end=992, + serialized_start=1269, + serialized_end=1336, +) + + +_BIGQUERYDESTINATION = _descriptor.Descriptor( + name="BigQueryDestination", + full_name="google.cloud.asset.v1.BigQueryDestination", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="dataset", + full_name="google.cloud.asset.v1.BigQueryDestination.dataset", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="table", + 
full_name="google.cloud.asset.v1.BigQueryDestination.table", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="force", + full_name="google.cloud.asset.v1.BigQueryDestination.force", + index=2, + number=3, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1338, + serialized_end=1416, ) _EXPORTASSETSREQUEST.fields_by_name[ @@ -487,18 +620,31 @@ "assets" ].message_type = google_dot_cloud_dot_asset__v1_dot_proto_dot_assets__pb2._TEMPORALASSET _OUTPUTCONFIG.fields_by_name["gcs_destination"].message_type = _GCSDESTINATION +_OUTPUTCONFIG.fields_by_name["bigquery_destination"].message_type = _BIGQUERYDESTINATION _OUTPUTCONFIG.oneofs_by_name["destination"].fields.append( _OUTPUTCONFIG.fields_by_name["gcs_destination"] ) _OUTPUTCONFIG.fields_by_name[ "gcs_destination" ].containing_oneof = _OUTPUTCONFIG.oneofs_by_name["destination"] +_OUTPUTCONFIG.oneofs_by_name["destination"].fields.append( + _OUTPUTCONFIG.fields_by_name["bigquery_destination"] +) +_OUTPUTCONFIG.fields_by_name[ + "bigquery_destination" +].containing_oneof = _OUTPUTCONFIG.oneofs_by_name["destination"] _GCSDESTINATION.oneofs_by_name["object_uri"].fields.append( _GCSDESTINATION.fields_by_name["uri"] ) _GCSDESTINATION.fields_by_name["uri"].containing_oneof = _GCSDESTINATION.oneofs_by_name[ "object_uri" ] 
+_GCSDESTINATION.oneofs_by_name["object_uri"].fields.append( + _GCSDESTINATION.fields_by_name["uri_prefix"] +) +_GCSDESTINATION.fields_by_name[ + "uri_prefix" +].containing_oneof = _GCSDESTINATION.oneofs_by_name["object_uri"] DESCRIPTOR.message_types_by_name["ExportAssetsRequest"] = _EXPORTASSETSREQUEST DESCRIPTOR.message_types_by_name["ExportAssetsResponse"] = _EXPORTASSETSRESPONSE DESCRIPTOR.message_types_by_name[ @@ -509,6 +655,7 @@ ] = _BATCHGETASSETSHISTORYRESPONSE DESCRIPTOR.message_types_by_name["OutputConfig"] = _OUTPUTCONFIG DESCRIPTOR.message_types_by_name["GcsDestination"] = _GCSDESTINATION +DESCRIPTOR.message_types_by_name["BigQueryDestination"] = _BIGQUERYDESTINATION DESCRIPTOR.enum_types_by_name["ContentType"] = _CONTENTTYPE _sym_db.RegisterFileDescriptor(DESCRIPTOR) @@ -539,9 +686,8 @@ A list of asset types of which to take a snapshot for. For example: "compute.googleapis.com/Disk". If specified, only matching assets will be returned. See `Introduction to Cloud - Asset Inventory `__ for all - supported asset types. + Asset Inventory `__ for all supported asset types. content_type: Asset content type. If not specified, no content but the asset name will be returned. @@ -600,13 +746,12 @@ te.googleapis.com/projects/my_project_123/zones/zone1/instance s/instance1``. See `Resource Names `__ and `Resource - Name Format `__ - for more info. The request becomes a no-op if the asset name - list is empty, and the max size of the asset name list is 100 - in one request. + Name Format `__ for more info. The + request becomes a no-op if the asset name list is empty, and + the max size of the asset name list is 100 in one request. content_type: - Required. The content type. + Optional. The content type. read_time_window: Optional. The time window for the asset history. Both start\_time and end\_time are optional and if set, it must be @@ -653,6 +798,11 @@ Asset export destination. gcs_destination: Destination on Cloud Storage. 
+ bigquery_destination: + Destination on BigQuery. The output table stores the fields in + asset proto as columns in BigQuery. The resource/iam\_policy + field is converted to a record with each field to a column, + except metadata to a single JSON string. """, # @@protoc_insertion_point(class_scope:google.cloud.asset.v1.OutputConfig) ), @@ -677,23 +827,72 @@ See `Viewing and Editing Object Metadata `__ for more information. + uri_prefix: + The uri prefix of all generated Cloud Storage objects. For + example: "gs://bucket\_name/object\_name\_prefix". Each object + uri is in format: "gs://bucket\_name/object\_name\_prefix// + and only contains assets for that type. starts from 0. For + example: "gs://bucket\_name/object\_name\_prefix/compute.googl + eapis.com/Disk/0" is the first shard of output objects + containing all compute.googleapis.com/Disk assets. An + INVALID\_ARGUMENT error will be returned if file with the same + name "gs://bucket\_name/object\_name\_prefix" already exists. """, # @@protoc_insertion_point(class_scope:google.cloud.asset.v1.GcsDestination) ), ) _sym_db.RegisterMessage(GcsDestination) +BigQueryDestination = _reflection.GeneratedProtocolMessageType( + "BigQueryDestination", + (_message.Message,), + dict( + DESCRIPTOR=_BIGQUERYDESTINATION, + __module__="google.cloud.asset_v1.proto.asset_service_pb2", + __doc__="""A BigQuery destination. + + + Attributes: + dataset: + Required. The BigQuery dataset in format + "projects/projectId/datasets/datasetId", to which the snapshot + result should be exported. If this dataset does not exist, the + export call returns an error. + table: + Required. The BigQuery table to which the snapshot result + should be written. If this table does not exist, a new table + with the given name will be created. + force: + If the destination table already exists and this flag is + ``TRUE``, the table will be overwritten by the contents of + assets snapshot. 
If the flag is not set and the destination + table already exists, the export call returns an error. + """, + # @@protoc_insertion_point(class_scope:google.cloud.asset.v1.BigQueryDestination) + ), +) +_sym_db.RegisterMessage(BigQueryDestination) + DESCRIPTOR._options = None +_EXPORTASSETSREQUEST.fields_by_name["parent"]._options = None +_EXPORTASSETSREQUEST.fields_by_name["output_config"]._options = None +_BATCHGETASSETSHISTORYREQUEST.fields_by_name["parent"]._options = None +_BATCHGETASSETSHISTORYREQUEST.fields_by_name["content_type"]._options = None +_BATCHGETASSETSHISTORYREQUEST.fields_by_name["read_time_window"]._options = None +_BIGQUERYDESTINATION.fields_by_name["dataset"]._options = None +_BIGQUERYDESTINATION.fields_by_name["table"]._options = None _ASSETSERVICE = _descriptor.ServiceDescriptor( name="AssetService", full_name="google.cloud.asset.v1.AssetService", file=DESCRIPTOR, index=0, - serialized_options=None, - serialized_start=1070, - serialized_end=1399, + serialized_options=_b( + "\312A\031cloudasset.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" + ), + serialized_start=1529, + serialized_end=2028, methods=[ _descriptor.MethodDescriptor( name="ExportAssets", @@ -703,7 +902,7 @@ input_type=_EXPORTASSETSREQUEST, output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, serialized_options=_b( - '\202\323\344\223\002""\035/v1/{parent=*/*}:exportAssets:\001*' + '\202\323\344\223\002""\035/v1/{parent=*/*}:exportAssets:\001*\312AW\n*google.cloud.asset.v1.ExportAssetsResponse\022)google.cloud.asset.v1.ExportAssetsRequest' ), ), _descriptor.MethodDescriptor( diff --git a/asset/google/cloud/asset_v1/proto/assets.proto b/asset/google/cloud/asset_v1/proto/assets.proto index f6a8108c0bd2..e689b761822c 100644 --- a/asset/google/cloud/asset_v1/proto/assets.proto +++ b/asset/google/cloud/asset_v1/proto/assets.proto @@ -18,11 +18,13 @@ syntax = "proto3"; package google.cloud.asset.v1; import "google/api/annotations.proto"; +import 
"google/api/resource.proto"; import "google/iam/v1/policy.proto"; import "google/protobuf/any.proto"; import "google/protobuf/struct.proto"; import "google/protobuf/timestamp.proto"; +option cc_enable_arenas = true; option csharp_namespace = "Google.Cloud.Asset.V1"; option go_package = "google.golang.org/genproto/googleapis/cloud/asset/v1;asset"; option java_multiple_files = true; @@ -30,7 +32,6 @@ option java_outer_classname = "AssetProto"; option java_package = "com.google.cloud.asset.v1"; option php_namespace = "Google\\Cloud\\Asset\\V1"; - // Temporal asset. In addition to the asset, the temporal asset includes the // status of the asset and valid from and to time of it. message TemporalAsset { @@ -57,6 +58,11 @@ message TimeWindow { // Cloud asset. This includes all Google Cloud Platform resources, // Cloud IAM policies, and other non-GCP assets. message Asset { + option (google.api.resource) = { + type: "cloudasset.googleapis.com/Asset" + pattern: "*" + }; + // The full name of the asset. For example: // `//compute.googleapis.com/projects/my_project_123/zones/zone1/instances/instance1`. 
// See [Resource diff --git a/asset/google/cloud/asset_v1/proto/assets_pb2.py b/asset/google/cloud/asset_v1/proto/assets_pb2.py index 99fba5cde79d..0b37f61584ae 100644 --- a/asset/google/cloud/asset_v1/proto/assets_pb2.py +++ b/asset/google/cloud/asset_v1/proto/assets_pb2.py @@ -16,6 +16,7 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.iam.v1 import iam_policy_pb2 as google_dot_iam_dot_v1_dot_policy__pb2 from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 @@ -27,13 +28,14 @@ package="google.cloud.asset.v1", syntax="proto3", serialized_options=_b( - "\n\031com.google.cloud.asset.v1B\nAssetProtoP\001Z:google.golang.org/genproto/googleapis/cloud/asset/v1;asset\252\002\025Google.Cloud.Asset.V1\312\002\025Google\\Cloud\\Asset\\V1" + "\n\031com.google.cloud.asset.v1B\nAssetProtoP\001Z:google.golang.org/genproto/googleapis/cloud/asset/v1;asset\370\001\001\252\002\025Google.Cloud.Asset.V1\312\002\025Google\\Cloud\\Asset\\V1" ), serialized_pb=_b( - '\n(google/cloud/asset_v1/proto/assets.proto\x12\x15google.cloud.asset.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x1agoogle/iam/v1/policy.proto\x1a\x19google/protobuf/any.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\x80\x01\n\rTemporalAsset\x12\x31\n\x06window\x18\x01 \x01(\x0b\x32!.google.cloud.asset.v1.TimeWindow\x12\x0f\n\x07\x64\x65leted\x18\x02 \x01(\x08\x12+\n\x05\x61sset\x18\x03 \x01(\x0b\x32\x1c.google.cloud.asset.v1.Asset"j\n\nTimeWindow\x12.\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"\x87\x01\n\x05\x41sset\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\nasset_type\x18\x02 \x01(\t\x12\x31\n\x08resource\x18\x03 
\x01(\x0b\x32\x1f.google.cloud.asset.v1.Resource\x12)\n\niam_policy\x18\x04 \x01(\x0b\x32\x15.google.iam.v1.Policy"\xa0\x01\n\x08Resource\x12\x0f\n\x07version\x18\x01 \x01(\t\x12\x1e\n\x16\x64iscovery_document_uri\x18\x02 \x01(\t\x12\x16\n\x0e\x64iscovery_name\x18\x03 \x01(\t\x12\x14\n\x0cresource_url\x18\x04 \x01(\t\x12\x0e\n\x06parent\x18\x05 \x01(\t\x12%\n\x04\x64\x61ta\x18\x06 \x01(\x0b\x32\x17.google.protobuf.StructB\x95\x01\n\x19\x63om.google.cloud.asset.v1B\nAssetProtoP\x01Z:google.golang.org/genproto/googleapis/cloud/asset/v1;asset\xaa\x02\x15Google.Cloud.Asset.V1\xca\x02\x15Google\\Cloud\\Asset\\V1b\x06proto3' + '\n(google/cloud/asset_v1/proto/assets.proto\x12\x15google.cloud.asset.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x19google/api/resource.proto\x1a\x1agoogle/iam/v1/policy.proto\x1a\x19google/protobuf/any.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\x80\x01\n\rTemporalAsset\x12\x31\n\x06window\x18\x01 \x01(\x0b\x32!.google.cloud.asset.v1.TimeWindow\x12\x0f\n\x07\x64\x65leted\x18\x02 \x01(\x08\x12+\n\x05\x61sset\x18\x03 \x01(\x0b\x32\x1c.google.cloud.asset.v1.Asset"j\n\nTimeWindow\x12.\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"\xb0\x01\n\x05\x41sset\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\nasset_type\x18\x02 \x01(\t\x12\x31\n\x08resource\x18\x03 \x01(\x0b\x32\x1f.google.cloud.asset.v1.Resource\x12)\n\niam_policy\x18\x04 \x01(\x0b\x32\x15.google.iam.v1.Policy:\'\xea\x41$\n\x1f\x63loudasset.googleapis.com/Asset\x12\x01*"\xa0\x01\n\x08Resource\x12\x0f\n\x07version\x18\x01 \x01(\t\x12\x1e\n\x16\x64iscovery_document_uri\x18\x02 \x01(\t\x12\x16\n\x0e\x64iscovery_name\x18\x03 \x01(\t\x12\x14\n\x0cresource_url\x18\x04 \x01(\t\x12\x0e\n\x06parent\x18\x05 \x01(\t\x12%\n\x04\x64\x61ta\x18\x06 
\x01(\x0b\x32\x17.google.protobuf.StructB\x98\x01\n\x19\x63om.google.cloud.asset.v1B\nAssetProtoP\x01Z:google.golang.org/genproto/googleapis/cloud/asset/v1;asset\xf8\x01\x01\xaa\x02\x15Google.Cloud.Asset.V1\xca\x02\x15Google\\Cloud\\Asset\\V1b\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_resource__pb2.DESCRIPTOR, google_dot_iam_dot_v1_dot_policy__pb2.DESCRIPTOR, google_dot_protobuf_dot_any__pb2.DESCRIPTOR, google_dot_protobuf_dot_struct__pb2.DESCRIPTOR, @@ -112,8 +114,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=216, - serialized_end=344, + serialized_start=243, + serialized_end=371, ) @@ -169,8 +171,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=346, - serialized_end=452, + serialized_start=373, + serialized_end=479, ) @@ -257,13 +259,13 @@ extensions=[], nested_types=[], enum_types=[], - serialized_options=None, + serialized_options=_b("\352A$\n\037cloudasset.googleapis.com/Asset\022\001*"), is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=455, - serialized_end=590, + serialized_start=482, + serialized_end=658, ) @@ -391,8 +393,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=593, - serialized_end=753, + serialized_start=661, + serialized_end=821, ) _TEMPORALASSET.fields_by_name["window"].message_type = _TIMEWINDOW @@ -541,4 +543,5 @@ DESCRIPTOR._options = None +_ASSET._options = None # @@protoc_insertion_point(module_scope) diff --git a/asset/setup.py b/asset/setup.py index a70843c8e1de..583cff983676 100644 --- a/asset/setup.py +++ b/asset/setup.py @@ -21,7 +21,7 @@ name = "google-cloud-asset" description = "Cloud Asset API API client library" -version = "0.4.1" +version = "0.5.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' diff --git a/asset/synth.metadata b/asset/synth.metadata index 2a27fdfa4cf0..630bcf259589 100644 --- a/asset/synth.metadata 
+++ b/asset/synth.metadata @@ -1,26 +1,26 @@ { - "updateTime": "2019-08-06T12:11:34.528326Z", + "updateTime": "2019-10-29T12:12:19.326302Z", "sources": [ { "generator": { "name": "artman", - "version": "0.32.1", - "dockerImage": "googleapis/artman@sha256:a684d40ba9a4e15946f5f2ca6b4bd9fe301192f522e9de4fff622118775f309b" + "version": "0.40.3", + "dockerImage": "googleapis/artman@sha256:c805f50525f5f557886c94ab76f56eaa09cb1da58c3ee95111fd34259376621a" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "e699b0cba64ffddfae39633417180f1f65875896", - "internalRef": "261759677" + "sha": "532773acbed8d09451dafb3d403ab1823e6a6e1e", + "internalRef": "277177415" } }, { "template": { "name": "python_library", "origin": "synthtool.gcp", - "version": "2019.5.2" + "version": "2019.10.17" } } ], diff --git a/automl/CHANGELOG.md b/automl/CHANGELOG.md index e991a616462e..2534c9441f0e 100644 --- a/automl/CHANGELOG.md +++ b/automl/CHANGELOG.md @@ -4,6 +4,39 @@ [1]: https://pypi.org/project/google-cloud-automl/#history +## 0.7.1 + +10-29-2019 13:45 PDT + + +### Implementation Changes +- Pass credentials to underlying clients in TableClient ([#9491](https://github.com/googleapis/google-cloud-python/pull/9491)) + +## 0.7.0 + +10-04-2019 15:37 PDT + +### Implementation Changes +- Return operation future from `AutoMlClient.create_dataset` (via synth).([#9423](https://github.com/googleapis/google-cloud-python/pull/9423)) + + +### New Features +- Add support for V1 API (via synth). ([#9388](https://github.com/googleapis/google-cloud-python/pull/9388)) +- Add support for passing project to 'GcsClient'. ([#9299](https://github.com/googleapis/google-cloud-python/pull/9299)) + +## 0.6.0 + +09-30-2019 10:40 PDT + +### New Features +- Add 'image_classification_model_deployment_metadata' arg to 'AutoMlClient.deploy_model' (via synth). 
([#9291](https://github.com/googleapis/google-cloud-python/pull/9291)) + +### Documentation +- Fix intersphinx reference to requests. ([#9294](https://github.com/googleapis/google-cloud-python/pull/9294)) + +### Internal / Testing Changes +- Preserve GcsClient, 'pandas' extras in testing (via synth). ([#9179](https://github.com/googleapis/google-cloud-python/pull/9179)) + ## 0.5.0 08-28-2019 14:07 PDT diff --git a/automl/docs/conf.py b/automl/docs/conf.py index 4eb565b9bf75..9ac18387deed 100644 --- a/automl/docs/conf.py +++ b/automl/docs/conf.py @@ -318,7 +318,7 @@ u"google-cloud-automl Documentation", author, "google-cloud-automl", - "GAPIC library for the {metadata.shortName} v1beta1 service", + "GAPIC library for the {metadata.shortName} v1 service", "APIs", ) ] @@ -344,7 +344,7 @@ "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://2.python-requests.org/en/master/", None), + "requests": ("https://requests.kennethreitz.org/en/master/", None), "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } diff --git a/automl/docs/gapic/v1/api.rst b/automl/docs/gapic/v1/api.rst new file mode 100644 index 000000000000..757fc1a0f456 --- /dev/null +++ b/automl/docs/gapic/v1/api.rst @@ -0,0 +1,6 @@ +Client for Cloud AutoML API +=========================== + +.. automodule:: google.cloud.automl_v1 + :members: + :inherited-members: \ No newline at end of file diff --git a/automl/docs/gapic/v1/types.rst b/automl/docs/gapic/v1/types.rst new file mode 100644 index 000000000000..5fd25134fc13 --- /dev/null +++ b/automl/docs/gapic/v1/types.rst @@ -0,0 +1,5 @@ +Types for Cloud AutoML API Client +================================= + +.. 
automodule:: google.cloud.automl_v1.types + :members: \ No newline at end of file diff --git a/automl/docs/index.rst b/automl/docs/index.rst index 01f577642cb1..90c2bfd56246 100644 --- a/automl/docs/index.rst +++ b/automl/docs/index.rst @@ -1,8 +1,22 @@ .. include:: README.rst +This package includes clients for multiple versions of the Cloud AutoML API. +By default, you will get ``v1``, the latest stable version. -Api Reference -------------- +v1 API Reference +------------------------ +.. toctree:: + :maxdepth: 2 + + gapic/v1/api + gapic/v1/types + +Previous beta release v1beta1 is provided as well. + +An API and type reference is provided for ``v1beta1``: + +v1beta1 API Reference +---------------------- .. toctree:: :maxdepth: 2 @@ -11,6 +25,7 @@ Api Reference gapic/v1beta1/tables + Changelog --------- diff --git a/automl/google/cloud/automl.py b/automl/google/cloud/automl.py index 77528b3d1dbf..9f96f4f44dc7 100644 --- a/automl/google/cloud/automl.py +++ b/automl/google/cloud/automl.py @@ -17,10 +17,10 @@ from __future__ import absolute_import -from google.cloud.automl_v1beta1 import AutoMlClient -from google.cloud.automl_v1beta1 import PredictionServiceClient -from google.cloud.automl_v1beta1 import enums -from google.cloud.automl_v1beta1 import types +from google.cloud.automl_v1 import AutoMlClient +from google.cloud.automl_v1 import PredictionServiceClient +from google.cloud.automl_v1 import enums +from google.cloud.automl_v1 import types __all__ = ("enums", "types", "AutoMlClient", "PredictionServiceClient") diff --git a/automl/google/cloud/automl_v1/__init__.py b/automl/google/cloud/automl_v1/__init__.py new file mode 100644 index 000000000000..f68180a567ab --- /dev/null +++ b/automl/google/cloud/automl_v1/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from __future__ import absolute_import + +from google.cloud.automl_v1 import types +from google.cloud.automl_v1.gapic import auto_ml_client +from google.cloud.automl_v1.gapic import enums +from google.cloud.automl_v1.gapic import prediction_service_client + + +class AutoMlClient(auto_ml_client.AutoMlClient): + __doc__ = auto_ml_client.AutoMlClient.__doc__ + enums = enums + + +class PredictionServiceClient(prediction_service_client.PredictionServiceClient): + __doc__ = prediction_service_client.PredictionServiceClient.__doc__ + enums = enums + + +__all__ = ("enums", "types", "AutoMlClient", "PredictionServiceClient") diff --git a/oslogin/google/cloud/oslogin_v1/proto/oslogin/common/__init__.py b/automl/google/cloud/automl_v1/gapic/__init__.py similarity index 100% rename from oslogin/google/cloud/oslogin_v1/proto/oslogin/common/__init__.py rename to automl/google/cloud/automl_v1/gapic/__init__.py diff --git a/automl/google/cloud/automl_v1/gapic/auto_ml_client.py b/automl/google/cloud/automl_v1/gapic/auto_ml_client.py new file mode 100644 index 000000000000..eebed1ee3831 --- /dev/null +++ b/automl/google/cloud/automl_v1/gapic/auto_ml_client.py @@ -0,0 +1,1514 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Accesses the google.cloud.automl.v1 AutoMl API.""" + +import functools +import pkg_resources +import warnings + +from google.oauth2 import service_account +import google.api_core.client_options +import google.api_core.gapic_v1.client_info +import google.api_core.gapic_v1.config +import google.api_core.gapic_v1.method +import google.api_core.gapic_v1.routing_header +import google.api_core.grpc_helpers +import google.api_core.operation +import google.api_core.operations_v1 +import google.api_core.page_iterator +import google.api_core.path_template +import grpc + +from google.cloud.automl_v1.gapic import auto_ml_client_config +from google.cloud.automl_v1.gapic import enums +from google.cloud.automl_v1.gapic.transports import auto_ml_grpc_transport +from google.cloud.automl_v1.proto import dataset_pb2 +from google.cloud.automl_v1.proto import io_pb2 +from google.cloud.automl_v1.proto import model_evaluation_pb2 +from google.cloud.automl_v1.proto import model_pb2 +from google.cloud.automl_v1.proto import operations_pb2 as proto_operations_pb2 +from google.cloud.automl_v1.proto import service_pb2 +from google.cloud.automl_v1.proto import service_pb2_grpc +from google.longrunning import operations_pb2 as longrunning_operations_pb2 +from google.protobuf import empty_pb2 +from google.protobuf import field_mask_pb2 + + +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-automl").version + + +class AutoMlClient(object): + """ + AutoML Server API. + + The resource names are assigned by the server. 
The server never reuses + names that it has created after the resources with those names are + deleted. + + An ID of a resource is the last element of the item's resource name. For + ``projects/{project_id}/locations/{location_id}/datasets/{dataset_id}``, + then the id for the item is ``{dataset_id}``. + + Currently the only supported ``location_id`` is "us-central1". + + On any input that is documented to expect a string parameter in + snake\_case or kebab-case, either of those cases is accepted. + """ + + SERVICE_ADDRESS = "automl.googleapis.com:443" + """The default address of the service.""" + + # The name of the interface for this client. This is the key used to + # find the method configuration in the client_config dictionary. + _INTERFACE_NAME = "google.cloud.automl.v1.AutoMl" + + @classmethod + def from_service_account_file(cls, filename, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AutoMlClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @classmethod + def dataset_path(cls, project, location, dataset): + """Return a fully-qualified dataset string.""" + return google.api_core.path_template.expand( + "projects/{project}/locations/{location}/datasets/{dataset}", + project=project, + location=location, + dataset=dataset, + ) + + @classmethod + def location_path(cls, project, location): + """Return a fully-qualified location string.""" + return google.api_core.path_template.expand( + "projects/{project}/locations/{location}", + project=project, + location=location, + ) + + @classmethod + def model_path(cls, project, location, model): + """Return a fully-qualified model string.""" + return google.api_core.path_template.expand( + "projects/{project}/locations/{location}/models/{model}", + project=project, + location=location, + model=model, + ) + + @classmethod + def model_evaluation_path(cls, project, location, model, model_evaluation): + """Return a fully-qualified model_evaluation string.""" + return google.api_core.path_template.expand( + "projects/{project}/locations/{location}/models/{model}/modelEvaluations/{model_evaluation}", + project=project, + location=location, + model=model, + model_evaluation=model_evaluation, + ) + + def __init__( + self, + transport=None, + channel=None, + credentials=None, + client_config=None, + client_info=None, + client_options=None, + ): + """Constructor. + + Args: + transport (Union[~.AutoMlGrpcTransport, + Callable[[~.Credentials, type], ~.AutoMlGrpcTransport]): A transport + instance, responsible for actually making the API calls. + The default transport uses the gRPC protocol. + This argument may also be a callable which returns a + transport instance. 
Callables will be sent the credentials + as the first argument and the default transport class as + the second argument. + channel (grpc.Channel): DEPRECATED. A ``Channel`` instance + through which to make calls. This argument is mutually exclusive + with ``credentials``; providing both will raise an exception. + credentials (google.auth.credentials.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is mutually exclusive with providing a + transport instance to ``transport``; doing so will raise + an exception. + client_config (dict): DEPRECATED. A dictionary of call options for + each method. If not specified, the default configuration is used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + client_options (Union[dict, google.api_core.client_options.ClientOptions]): + Client options used to set user options on the client. API Endpoint + should be set through client_options. + """ + # Raise deprecation warnings for things we want to go away. 
+ if client_config is not None: + warnings.warn( + "The `client_config` argument is deprecated.", + PendingDeprecationWarning, + stacklevel=2, + ) + else: + client_config = auto_ml_client_config.config + + if channel: + warnings.warn( + "The `channel` argument is deprecated; use " "`transport` instead.", + PendingDeprecationWarning, + stacklevel=2, + ) + + api_endpoint = self.SERVICE_ADDRESS + if client_options: + if type(client_options) == dict: + client_options = google.api_core.client_options.from_dict( + client_options + ) + if client_options.api_endpoint: + api_endpoint = client_options.api_endpoint + + # Instantiate the transport. + # The transport is responsible for handling serialization and + # deserialization and actually sending data to the service. + if transport: + if callable(transport): + self.transport = transport( + credentials=credentials, + default_class=auto_ml_grpc_transport.AutoMlGrpcTransport, + address=api_endpoint, + ) + else: + if credentials: + raise ValueError( + "Received both a transport instance and " + "credentials; these are mutually exclusive." + ) + self.transport = transport + else: + self.transport = auto_ml_grpc_transport.AutoMlGrpcTransport( + address=api_endpoint, channel=channel, credentials=credentials + ) + + if client_info is None: + client_info = google.api_core.gapic_v1.client_info.ClientInfo( + gapic_version=_GAPIC_LIBRARY_VERSION + ) + else: + client_info.gapic_version = _GAPIC_LIBRARY_VERSION + self._client_info = client_info + + # Parse out the default settings for retry and timeout for each RPC + # from the client configuration. + # (Ordinarily, these are the defaults specified in the `*_config.py` + # file next to this one.) + self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( + client_config["interfaces"][self._INTERFACE_NAME] + ) + + # Save a dictionary of cached API call functions. 
+ # These are the actual callables which invoke the proper + # transport methods, wrapped with `wrap_method` to add retry, + # timeout, and the like. + self._inner_api_calls = {} + + # Service calls + def create_dataset( + self, + parent, + dataset, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Creates a dataset. + + Example: + >>> from google.cloud import automl_v1 + >>> + >>> client = automl_v1.AutoMlClient() + >>> + >>> parent = client.location_path('[PROJECT]', '[LOCATION]') + >>> + >>> # TODO: Initialize `dataset`: + >>> dataset = {} + >>> + >>> response = client.create_dataset(parent, dataset) + >>> + >>> def callback(operation_future): + ... # Handle result. + ... result = operation_future.result() + >>> + >>> response.add_done_callback(callback) + >>> + >>> # Handle metadata. + >>> metadata = response.metadata() + + Args: + parent (str): The resource name of the project to create the dataset for. + dataset (Union[dict, ~google.cloud.automl_v1.types.Dataset]): The dataset to create. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.automl_v1.types.Dataset` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.automl_v1.types._OperationFuture` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. 
+ ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "create_dataset" not in self._inner_api_calls: + self._inner_api_calls[ + "create_dataset" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.create_dataset, + default_retry=self._method_configs["CreateDataset"].retry, + default_timeout=self._method_configs["CreateDataset"].timeout, + client_info=self._client_info, + ) + + request = service_pb2.CreateDatasetRequest(parent=parent, dataset=dataset) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("parent", parent)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + operation = self._inner_api_calls["create_dataset"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + return google.api_core.operation.from_gapic( + operation, + self.transport._operations_client, + dataset_pb2.Dataset, + metadata_type=proto_operations_pb2.OperationMetadata, + ) + + def update_dataset( + self, + dataset, + update_mask, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Updates a dataset. + + Example: + >>> from google.cloud import automl_v1 + >>> + >>> client = automl_v1.AutoMlClient() + >>> + >>> # TODO: Initialize `dataset`: + >>> dataset = {} + >>> + >>> # TODO: Initialize `update_mask`: + >>> update_mask = {} + >>> + >>> response = client.update_dataset(dataset, update_mask) + + Args: + dataset (Union[dict, ~google.cloud.automl_v1.types.Dataset]): The dataset which replaces the resource on the server. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.automl_v1.types.Dataset` + update_mask (Union[dict, ~google.cloud.automl_v1.types.FieldMask]): Required. 
The update mask applies to the resource. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.automl_v1.types.FieldMask` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.automl_v1.types.Dataset` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
+ if "update_dataset" not in self._inner_api_calls: + self._inner_api_calls[ + "update_dataset" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.update_dataset, + default_retry=self._method_configs["UpdateDataset"].retry, + default_timeout=self._method_configs["UpdateDataset"].timeout, + client_info=self._client_info, + ) + + request = service_pb2.UpdateDatasetRequest( + dataset=dataset, update_mask=update_mask + ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("dataset.name", dataset.name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + return self._inner_api_calls["update_dataset"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def get_dataset( + self, + name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Gets a dataset. + + Example: + >>> from google.cloud import automl_v1 + >>> + >>> client = automl_v1.AutoMlClient() + >>> + >>> name = client.dataset_path('[PROJECT]', '[LOCATION]', '[DATASET]') + >>> + >>> response = client.get_dataset(name) + + Args: + name (str): The resource name of the dataset to retrieve. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.automl_v1.types.Dataset` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. 
+ google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "get_dataset" not in self._inner_api_calls: + self._inner_api_calls[ + "get_dataset" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.get_dataset, + default_retry=self._method_configs["GetDataset"].retry, + default_timeout=self._method_configs["GetDataset"].timeout, + client_info=self._client_info, + ) + + request = service_pb2.GetDatasetRequest(name=name) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("name", name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + return self._inner_api_calls["get_dataset"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def list_datasets( + self, + parent, + filter_=None, + page_size=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Lists datasets in a project. + + Example: + >>> from google.cloud import automl_v1 + >>> + >>> client = automl_v1.AutoMlClient() + >>> + >>> parent = client.location_path('[PROJECT]', '[LOCATION]') + >>> + >>> # Iterate over all results + >>> for element in client.list_datasets(parent): + ... # process element + ... pass + >>> + >>> + >>> # Alternatively: + >>> + >>> # Iterate over results one page at a time + >>> for page in client.list_datasets(parent).pages: + ... for element in page: + ... # process element + ... pass + + Args: + parent (str): The resource name of the project from which to list datasets. + filter_ (str): An expression for filtering the results of the request. + + - ``dataset_metadata`` - for existence of the case (e.g. 
+ image\_classification\_dataset\_metadata:\*). Some examples of using + the filter are: + + - ``translation_dataset_metadata:*`` --> The dataset has + translation\_dataset\_metadata. + page_size (int): The maximum number of resources contained in the + underlying API response. If page streaming is performed per- + resource, this parameter does not affect the return value. If page + streaming is performed per-page, this determines the maximum number + of resources in a page. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.api_core.page_iterator.PageIterator` instance. + An iterable of :class:`~google.cloud.automl_v1.types.Dataset` instances. + You can also iterate over the pages of the response + using its `pages` property. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
+ if "list_datasets" not in self._inner_api_calls: + self._inner_api_calls[ + "list_datasets" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.list_datasets, + default_retry=self._method_configs["ListDatasets"].retry, + default_timeout=self._method_configs["ListDatasets"].timeout, + client_info=self._client_info, + ) + + request = service_pb2.ListDatasetsRequest( + parent=parent, filter=filter_, page_size=page_size + ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("parent", parent)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + iterator = google.api_core.page_iterator.GRPCIterator( + client=None, + method=functools.partial( + self._inner_api_calls["list_datasets"], + retry=retry, + timeout=timeout, + metadata=metadata, + ), + request=request, + items_field="datasets", + request_token_field="page_token", + response_token_field="next_page_token", + ) + return iterator + + def delete_dataset( + self, + name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Deletes a dataset and all of its contents. Returns empty response in the + ``response`` field when it completes, and ``delete_details`` in the + ``metadata`` field. + + Example: + >>> from google.cloud import automl_v1 + >>> + >>> client = automl_v1.AutoMlClient() + >>> + >>> name = client.dataset_path('[PROJECT]', '[LOCATION]', '[DATASET]') + >>> + >>> response = client.delete_dataset(name) + >>> + >>> def callback(operation_future): + ... # Handle result. + ... result = operation_future.result() + >>> + >>> response.add_done_callback(callback) + >>> + >>> # Handle metadata. + >>> metadata = response.metadata() + + Args: + name (str): The resource name of the dataset to delete. 
+ retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.automl_v1.types._OperationFuture` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "delete_dataset" not in self._inner_api_calls: + self._inner_api_calls[ + "delete_dataset" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.delete_dataset, + default_retry=self._method_configs["DeleteDataset"].retry, + default_timeout=self._method_configs["DeleteDataset"].timeout, + client_info=self._client_info, + ) + + request = service_pb2.DeleteDatasetRequest(name=name) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("name", name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + operation = self._inner_api_calls["delete_dataset"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + return google.api_core.operation.from_gapic( + operation, + self.transport._operations_client, + empty_pb2.Empty, + metadata_type=proto_operations_pb2.OperationMetadata, + ) + + def import_data( + self, + name, + input_config, + retry=google.api_core.gapic_v1.method.DEFAULT, + 
timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Imports data into a dataset. + + Example: + >>> from google.cloud import automl_v1 + >>> + >>> client = automl_v1.AutoMlClient() + >>> + >>> name = client.dataset_path('[PROJECT]', '[LOCATION]', '[DATASET]') + >>> + >>> # TODO: Initialize `input_config`: + >>> input_config = {} + >>> + >>> response = client.import_data(name, input_config) + >>> + >>> def callback(operation_future): + ... # Handle result. + ... result = operation_future.result() + >>> + >>> response.add_done_callback(callback) + >>> + >>> # Handle metadata. + >>> metadata = response.metadata() + + Args: + name (str): Required. Dataset name. Dataset must already exist. All imported + annotations and examples will be added. + input_config (Union[dict, ~google.cloud.automl_v1.types.InputConfig]): Required. The desired input location and its domain specific semantics, + if any. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.automl_v1.types.InputConfig` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.automl_v1.types._OperationFuture` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
+ if "import_data" not in self._inner_api_calls: + self._inner_api_calls[ + "import_data" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.import_data, + default_retry=self._method_configs["ImportData"].retry, + default_timeout=self._method_configs["ImportData"].timeout, + client_info=self._client_info, + ) + + request = service_pb2.ImportDataRequest(name=name, input_config=input_config) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("name", name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + operation = self._inner_api_calls["import_data"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + return google.api_core.operation.from_gapic( + operation, + self.transport._operations_client, + empty_pb2.Empty, + metadata_type=proto_operations_pb2.OperationMetadata, + ) + + def export_data( + self, + name, + output_config, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Exports dataset's data to the provided output location. Returns an empty + response in the ``response`` field when it completes. + + Example: + >>> from google.cloud import automl_v1 + >>> + >>> client = automl_v1.AutoMlClient() + >>> + >>> name = client.dataset_path('[PROJECT]', '[LOCATION]', '[DATASET]') + >>> + >>> # TODO: Initialize `output_config`: + >>> output_config = {} + >>> + >>> response = client.export_data(name, output_config) + >>> + >>> def callback(operation_future): + ... # Handle result. + ... result = operation_future.result() + >>> + >>> response.add_done_callback(callback) + >>> + >>> # Handle metadata. + >>> metadata = response.metadata() + + Args: + name (str): Required. The resource name of the dataset. + output_config (Union[dict, ~google.cloud.automl_v1.types.OutputConfig]): Required. 
The desired output location. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.automl_v1.types.OutputConfig` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.automl_v1.types._OperationFuture` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
+ if "export_data" not in self._inner_api_calls: + self._inner_api_calls[ + "export_data" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.export_data, + default_retry=self._method_configs["ExportData"].retry, + default_timeout=self._method_configs["ExportData"].timeout, + client_info=self._client_info, + ) + + request = service_pb2.ExportDataRequest(name=name, output_config=output_config) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("name", name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + operation = self._inner_api_calls["export_data"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + return google.api_core.operation.from_gapic( + operation, + self.transport._operations_client, + empty_pb2.Empty, + metadata_type=proto_operations_pb2.OperationMetadata, + ) + + def create_model( + self, + parent, + model, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Creates a model. Returns a Model in the ``response`` field when it + completes. When you create a model, several model evaluations are + created for it: a global evaluation, and one evaluation for each + annotation spec. + + Example: + >>> from google.cloud import automl_v1 + >>> + >>> client = automl_v1.AutoMlClient() + >>> + >>> parent = client.location_path('[PROJECT]', '[LOCATION]') + >>> + >>> # TODO: Initialize `model`: + >>> model = {} + >>> + >>> response = client.create_model(parent, model) + >>> + >>> def callback(operation_future): + ... # Handle result. + ... result = operation_future.result() + >>> + >>> response.add_done_callback(callback) + >>> + >>> # Handle metadata. + >>> metadata = response.metadata() + + Args: + parent (str): Resource name of the parent project where the model is being created. 
+ model (Union[dict, ~google.cloud.automl_v1.types.Model]): The model to create. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.automl_v1.types.Model` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.automl_v1.types._OperationFuture` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
+ if "create_model" not in self._inner_api_calls: + self._inner_api_calls[ + "create_model" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.create_model, + default_retry=self._method_configs["CreateModel"].retry, + default_timeout=self._method_configs["CreateModel"].timeout, + client_info=self._client_info, + ) + + request = service_pb2.CreateModelRequest(parent=parent, model=model) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("parent", parent)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + operation = self._inner_api_calls["create_model"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + return google.api_core.operation.from_gapic( + operation, + self.transport._operations_client, + model_pb2.Model, + metadata_type=proto_operations_pb2.OperationMetadata, + ) + + def get_model( + self, + name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Gets a model. + + Example: + >>> from google.cloud import automl_v1 + >>> + >>> client = automl_v1.AutoMlClient() + >>> + >>> name = client.model_path('[PROJECT]', '[LOCATION]', '[MODEL]') + >>> + >>> response = client.get_model(name) + + Args: + name (str): Resource name of the model. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.automl_v1.types.Model` instance. 
+ + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "get_model" not in self._inner_api_calls: + self._inner_api_calls[ + "get_model" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.get_model, + default_retry=self._method_configs["GetModel"].retry, + default_timeout=self._method_configs["GetModel"].timeout, + client_info=self._client_info, + ) + + request = service_pb2.GetModelRequest(name=name) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("name", name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + return self._inner_api_calls["get_model"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def update_model( + self, + model, + update_mask, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Updates a model. + + Example: + >>> from google.cloud import automl_v1 + >>> + >>> client = automl_v1.AutoMlClient() + >>> + >>> # TODO: Initialize `model`: + >>> model = {} + >>> + >>> # TODO: Initialize `update_mask`: + >>> update_mask = {} + >>> + >>> response = client.update_model(model, update_mask) + + Args: + model (Union[dict, ~google.cloud.automl_v1.types.Model]): The model which replaces the resource on the server. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.automl_v1.types.Model` + update_mask (Union[dict, ~google.cloud.automl_v1.types.FieldMask]): Required. The update mask applies to the resource. 
+ + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.automl_v1.types.FieldMask` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.automl_v1.types.Model` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "update_model" not in self._inner_api_calls: + self._inner_api_calls[ + "update_model" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.update_model, + default_retry=self._method_configs["UpdateModel"].retry, + default_timeout=self._method_configs["UpdateModel"].timeout, + client_info=self._client_info, + ) + + request = service_pb2.UpdateModelRequest(model=model, update_mask=update_mask) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("model.name", model.name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + return self._inner_api_calls["update_model"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def list_models( + self, + parent, + filter_=None, + page_size=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + 
metadata=None, + ): + """ + Lists models. + + Example: + >>> from google.cloud import automl_v1 + >>> + >>> client = automl_v1.AutoMlClient() + >>> + >>> parent = client.location_path('[PROJECT]', '[LOCATION]') + >>> + >>> # Iterate over all results + >>> for element in client.list_models(parent): + ... # process element + ... pass + >>> + >>> + >>> # Alternatively: + >>> + >>> # Iterate over results one page at a time + >>> for page in client.list_models(parent).pages: + ... for element in page: + ... # process element + ... pass + + Args: + parent (str): Resource name of the project, from which to list the models. + filter_ (str): An expression for filtering the results of the request. + + - ``model_metadata`` - for existence of the case (e.g. + video\_classification\_model\_metadata:\*). + + - ``dataset_id`` - for = or !=. Some examples of using the filter are: + + - ``image_classification_model_metadata:*`` --> The model has + image\_classification\_model\_metadata. + + - ``dataset_id=5`` --> The model was created from a dataset with ID 5. + page_size (int): The maximum number of resources contained in the + underlying API response. If page streaming is performed per- + resource, this parameter does not affect the return value. If page + streaming is performed per-page, this determines the maximum number + of resources in a page. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.api_core.page_iterator.PageIterator` instance. + An iterable of :class:`~google.cloud.automl_v1.types.Model` instances. 
+ You can also iterate over the pages of the response + using its `pages` property. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "list_models" not in self._inner_api_calls: + self._inner_api_calls[ + "list_models" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.list_models, + default_retry=self._method_configs["ListModels"].retry, + default_timeout=self._method_configs["ListModels"].timeout, + client_info=self._client_info, + ) + + request = service_pb2.ListModelsRequest( + parent=parent, filter=filter_, page_size=page_size + ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("parent", parent)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + iterator = google.api_core.page_iterator.GRPCIterator( + client=None, + method=functools.partial( + self._inner_api_calls["list_models"], + retry=retry, + timeout=timeout, + metadata=metadata, + ), + request=request, + items_field="model", + request_token_field="page_token", + response_token_field="next_page_token", + ) + return iterator + + def delete_model( + self, + name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Deletes a model. Returns ``google.protobuf.Empty`` in the ``response`` + field when it completes, and ``delete_details`` in the ``metadata`` + field. 
+ + Example: + >>> from google.cloud import automl_v1 + >>> + >>> client = automl_v1.AutoMlClient() + >>> + >>> name = client.model_path('[PROJECT]', '[LOCATION]', '[MODEL]') + >>> + >>> response = client.delete_model(name) + >>> + >>> def callback(operation_future): + ... # Handle result. + ... result = operation_future.result() + >>> + >>> response.add_done_callback(callback) + >>> + >>> # Handle metadata. + >>> metadata = response.metadata() + + Args: + name (str): Resource name of the model being deleted. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.automl_v1.types._OperationFuture` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
+ if "delete_model" not in self._inner_api_calls: + self._inner_api_calls[ + "delete_model" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.delete_model, + default_retry=self._method_configs["DeleteModel"].retry, + default_timeout=self._method_configs["DeleteModel"].timeout, + client_info=self._client_info, + ) + + request = service_pb2.DeleteModelRequest(name=name) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("name", name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + operation = self._inner_api_calls["delete_model"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + return google.api_core.operation.from_gapic( + operation, + self.transport._operations_client, + empty_pb2.Empty, + metadata_type=proto_operations_pb2.OperationMetadata, + ) + + def get_model_evaluation( + self, + name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Gets a model evaluation. + + Example: + >>> from google.cloud import automl_v1 + >>> + >>> client = automl_v1.AutoMlClient() + >>> + >>> name = client.model_evaluation_path('[PROJECT]', '[LOCATION]', '[MODEL]', '[MODEL_EVALUATION]') + >>> + >>> response = client.get_model_evaluation(name) + + Args: + name (str): Resource name for the model evaluation. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. 
+ + Returns: + A :class:`~google.cloud.automl_v1.types.ModelEvaluation` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "get_model_evaluation" not in self._inner_api_calls: + self._inner_api_calls[ + "get_model_evaluation" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.get_model_evaluation, + default_retry=self._method_configs["GetModelEvaluation"].retry, + default_timeout=self._method_configs["GetModelEvaluation"].timeout, + client_info=self._client_info, + ) + + request = service_pb2.GetModelEvaluationRequest(name=name) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("name", name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + return self._inner_api_calls["get_model_evaluation"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def list_model_evaluations( + self, + parent, + filter_, + page_size=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Lists model evaluations. + + Example: + >>> from google.cloud import automl_v1 + >>> + >>> client = automl_v1.AutoMlClient() + >>> + >>> parent = client.model_path('[PROJECT]', '[LOCATION]', '[MODEL]') + >>> + >>> # TODO: Initialize `filter_`: + >>> filter_ = '' + >>> + >>> # Iterate over all results + >>> for element in client.list_model_evaluations(parent, filter_): + ... # process element + ... 
pass + >>> + >>> + >>> # Alternatively: + >>> + >>> # Iterate over results one page at a time + >>> for page in client.list_model_evaluations(parent, filter_).pages: + ... for element in page: + ... # process element + ... pass + + Args: + parent (str): Resource name of the model to list the model evaluations for. + If modelId is set as "-", this will list model evaluations from across all + models of the parent location. + filter_ (str): An expression for filtering the results of the request. + + - ``annotation_spec_id`` - for =, != or existence. See example below + for the last. + + Some examples of using the filter are: + + - ``annotation_spec_id!=4`` --> The model evaluation was done for + annotation spec with ID different than 4. + - ``NOT annotation_spec_id:*`` --> The model evaluation was done for + aggregate of all annotation specs. + page_size (int): The maximum number of resources contained in the + underlying API response. If page streaming is performed per- + resource, this parameter does not affect the return value. If page + streaming is performed per-page, this determines the maximum number + of resources in a page. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.api_core.page_iterator.PageIterator` instance. + An iterable of :class:`~google.cloud.automl_v1.types.ModelEvaluation` instances. + You can also iterate over the pages of the response + using its `pages` property. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. 
+ google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "list_model_evaluations" not in self._inner_api_calls: + self._inner_api_calls[ + "list_model_evaluations" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.list_model_evaluations, + default_retry=self._method_configs["ListModelEvaluations"].retry, + default_timeout=self._method_configs["ListModelEvaluations"].timeout, + client_info=self._client_info, + ) + + request = service_pb2.ListModelEvaluationsRequest( + parent=parent, filter=filter_, page_size=page_size + ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("parent", parent)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + iterator = google.api_core.page_iterator.GRPCIterator( + client=None, + method=functools.partial( + self._inner_api_calls["list_model_evaluations"], + retry=retry, + timeout=timeout, + metadata=metadata, + ), + request=request, + items_field="model_evaluation", + request_token_field="page_token", + response_token_field="next_page_token", + ) + return iterator diff --git a/automl/google/cloud/automl_v1/gapic/auto_ml_client_config.py b/automl/google/cloud/automl_v1/gapic/auto_ml_client_config.py new file mode 100644 index 000000000000..6822a905d8bf --- /dev/null +++ b/automl/google/cloud/automl_v1/gapic/auto_ml_client_config.py @@ -0,0 +1,93 @@ +config = { + "interfaces": { + "google.cloud.automl.v1.AutoMl": { + "retry_codes": { + "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], + "non_idempotent": [], + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + 
"initial_rpc_timeout_millis": 20000, + "rpc_timeout_multiplier": 1.0, + "max_rpc_timeout_millis": 20000, + "total_timeout_millis": 600000, + } + }, + "methods": { + "CreateDataset": { + "timeout_millis": 5000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + }, + "UpdateDataset": { + "timeout_millis": 5000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + }, + "GetDataset": { + "timeout_millis": 5000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, + "ListDatasets": { + "timeout_millis": 50000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, + "DeleteDataset": { + "timeout_millis": 5000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, + "ImportData": { + "timeout_millis": 20000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + }, + "ExportData": { + "timeout_millis": 5000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + }, + "CreateModel": { + "timeout_millis": 20000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + }, + "GetModel": { + "timeout_millis": 5000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, + "UpdateModel": { + "timeout_millis": 5000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + }, + "ListModels": { + "timeout_millis": 50000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, + "DeleteModel": { + "timeout_millis": 5000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, + "GetModelEvaluation": { + "timeout_millis": 5000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, + "ListModelEvaluations": { + "timeout_millis": 50000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, + }, + } + } +} diff --git a/automl/google/cloud/automl_v1/gapic/enums.py 
b/automl/google/cloud/automl_v1/gapic/enums.py new file mode 100644 index 000000000000..d9c50d5677e5 --- /dev/null +++ b/automl/google/cloud/automl_v1/gapic/enums.py @@ -0,0 +1,35 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Wrappers for protocol buffer enum types.""" + +import enum + + +class Model(object): + class DeploymentState(enum.IntEnum): + """ + Deployment state of the model. + + Attributes: + DEPLOYMENT_STATE_UNSPECIFIED (int): Should not be used, an un-set enum has this value by default. + DEPLOYED (int): Model is deployed. + UNDEPLOYED (int): Model is not deployed. + """ + + DEPLOYMENT_STATE_UNSPECIFIED = 0 + DEPLOYED = 1 + UNDEPLOYED = 2 diff --git a/automl/google/cloud/automl_v1/gapic/prediction_service_client.py b/automl/google/cloud/automl_v1/gapic/prediction_service_client.py new file mode 100644 index 000000000000..274d7cf6db76 --- /dev/null +++ b/automl/google/cloud/automl_v1/gapic/prediction_service_client.py @@ -0,0 +1,297 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Accesses the google.cloud.automl.v1 PredictionService API.""" + +import pkg_resources +import warnings + +from google.oauth2 import service_account +import google.api_core.client_options +import google.api_core.gapic_v1.client_info +import google.api_core.gapic_v1.config +import google.api_core.gapic_v1.method +import google.api_core.gapic_v1.routing_header +import google.api_core.grpc_helpers +import google.api_core.path_template +import grpc + +from google.cloud.automl_v1.gapic import enums +from google.cloud.automl_v1.gapic import prediction_service_client_config +from google.cloud.automl_v1.gapic.transports import prediction_service_grpc_transport +from google.cloud.automl_v1.proto import data_items_pb2 +from google.cloud.automl_v1.proto import dataset_pb2 +from google.cloud.automl_v1.proto import io_pb2 +from google.cloud.automl_v1.proto import model_evaluation_pb2 +from google.cloud.automl_v1.proto import model_pb2 +from google.cloud.automl_v1.proto import operations_pb2 as proto_operations_pb2 +from google.cloud.automl_v1.proto import prediction_service_pb2 +from google.cloud.automl_v1.proto import prediction_service_pb2_grpc +from google.cloud.automl_v1.proto import service_pb2 +from google.cloud.automl_v1.proto import service_pb2_grpc +from google.longrunning import operations_pb2 as longrunning_operations_pb2 +from google.protobuf import empty_pb2 +from google.protobuf import field_mask_pb2 + + +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-automl").version + + +class PredictionServiceClient(object): + """ + AutoML 
Prediction API. + + On any input that is documented to expect a string parameter in + snake\_case or kebab-case, either of those cases is accepted. + """ + + SERVICE_ADDRESS = "automl.googleapis.com:443" + """The default address of the service.""" + + # The name of the interface for this client. This is the key used to + # find the method configuration in the client_config dictionary. + _INTERFACE_NAME = "google.cloud.automl.v1.PredictionService" + + @classmethod + def from_service_account_file(cls, filename, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PredictionServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @classmethod + def model_path(cls, project, location, model): + """Return a fully-qualified model string.""" + return google.api_core.path_template.expand( + "projects/{project}/locations/{location}/models/{model}", + project=project, + location=location, + model=model, + ) + + def __init__( + self, + transport=None, + channel=None, + credentials=None, + client_config=None, + client_info=None, + client_options=None, + ): + """Constructor. + + Args: + transport (Union[~.PredictionServiceGrpcTransport, + Callable[[~.Credentials, type], ~.PredictionServiceGrpcTransport]): A transport + instance, responsible for actually making the API calls. + The default transport uses the gRPC protocol. + This argument may also be a callable which returns a + transport instance. Callables will be sent the credentials + as the first argument and the default transport class as + the second argument. 
+ channel (grpc.Channel): DEPRECATED. A ``Channel`` instance + through which to make calls. This argument is mutually exclusive + with ``credentials``; providing both will raise an exception. + credentials (google.auth.credentials.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is mutually exclusive with providing a + transport instance to ``transport``; doing so will raise + an exception. + client_config (dict): DEPRECATED. A dictionary of call options for + each method. If not specified, the default configuration is used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + client_options (Union[dict, google.api_core.client_options.ClientOptions]): + Client options used to set user options on the client. API Endpoint + should be set through client_options. + """ + # Raise deprecation warnings for things we want to go away. + if client_config is not None: + warnings.warn( + "The `client_config` argument is deprecated.", + PendingDeprecationWarning, + stacklevel=2, + ) + else: + client_config = prediction_service_client_config.config + + if channel: + warnings.warn( + "The `channel` argument is deprecated; use " "`transport` instead.", + PendingDeprecationWarning, + stacklevel=2, + ) + + api_endpoint = self.SERVICE_ADDRESS + if client_options: + if type(client_options) == dict: + client_options = google.api_core.client_options.from_dict( + client_options + ) + if client_options.api_endpoint: + api_endpoint = client_options.api_endpoint + + # Instantiate the transport. 
+ # The transport is responsible for handling serialization and + # deserialization and actually sending data to the service. + if transport: + if callable(transport): + self.transport = transport( + credentials=credentials, + default_class=prediction_service_grpc_transport.PredictionServiceGrpcTransport, + address=api_endpoint, + ) + else: + if credentials: + raise ValueError( + "Received both a transport instance and " + "credentials; these are mutually exclusive." + ) + self.transport = transport + else: + self.transport = prediction_service_grpc_transport.PredictionServiceGrpcTransport( + address=api_endpoint, channel=channel, credentials=credentials + ) + + if client_info is None: + client_info = google.api_core.gapic_v1.client_info.ClientInfo( + gapic_version=_GAPIC_LIBRARY_VERSION + ) + else: + client_info.gapic_version = _GAPIC_LIBRARY_VERSION + self._client_info = client_info + + # Parse out the default settings for retry and timeout for each RPC + # from the client configuration. + # (Ordinarily, these are the defaults specified in the `*_config.py` + # file next to this one.) + self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( + client_config["interfaces"][self._INTERFACE_NAME] + ) + + # Save a dictionary of cached API call functions. + # These are the actual callables which invoke the proper + # transport methods, wrapped with `wrap_method` to add retry, + # timeout, and the like. + self._inner_api_calls = {} + + # Service calls + def predict( + self, + name, + payload, + params=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Perform an online prediction. The prediction result will be directly + returned in the response. Available for following ML problems, and their + expected request payloads: + + - Translation - TextSnippet, content up to 25,000 characters, UTF-8 + encoded. 
+ + Example: + >>> from google.cloud import automl_v1 + >>> + >>> client = automl_v1.PredictionServiceClient() + >>> + >>> name = client.model_path('[PROJECT]', '[LOCATION]', '[MODEL]') + >>> + >>> # TODO: Initialize `payload`: + >>> payload = {} + >>> + >>> response = client.predict(name, payload) + + Args: + name (str): Name of the model requested to serve the prediction. + payload (Union[dict, ~google.cloud.automl_v1.types.ExamplePayload]): Required. Payload to perform a prediction on. The payload must match the + problem type that the model was trained to solve. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.automl_v1.types.ExamplePayload` + params (dict[str -> str]): Additional domain-specific parameters, any string must be up to 25000 + characters long. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.automl_v1.types.PredictResponse` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
+ if "predict" not in self._inner_api_calls: + self._inner_api_calls[ + "predict" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.predict, + default_retry=self._method_configs["Predict"].retry, + default_timeout=self._method_configs["Predict"].timeout, + client_info=self._client_info, + ) + + request = prediction_service_pb2.PredictRequest( + name=name, payload=payload, params=params + ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("name", name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + return self._inner_api_calls["predict"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) diff --git a/automl/google/cloud/automl_v1/gapic/prediction_service_client_config.py b/automl/google/cloud/automl_v1/gapic/prediction_service_client_config.py new file mode 100644 index 000000000000..21fc698d48db --- /dev/null +++ b/automl/google/cloud/automl_v1/gapic/prediction_service_client_config.py @@ -0,0 +1,28 @@ +config = { + "interfaces": { + "google.cloud.automl.v1.PredictionService": { + "retry_codes": { + "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], + "non_idempotent": [], + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 20000, + "rpc_timeout_multiplier": 1.0, + "max_rpc_timeout_millis": 20000, + "total_timeout_millis": 600000, + } + }, + "methods": { + "Predict": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + } + }, + } + } +} diff --git a/oslogin/google/cloud/oslogin_v1/proto/oslogin_v1/proto/__init__.py b/automl/google/cloud/automl_v1/gapic/transports/__init__.py similarity index 100% rename from oslogin/google/cloud/oslogin_v1/proto/oslogin_v1/proto/__init__.py rename to 
automl/google/cloud/automl_v1/gapic/transports/__init__.py diff --git a/automl/google/cloud/automl_v1/gapic/transports/auto_ml_grpc_transport.py b/automl/google/cloud/automl_v1/gapic/transports/auto_ml_grpc_transport.py new file mode 100644 index 000000000000..e07d24ffbbe4 --- /dev/null +++ b/automl/google/cloud/automl_v1/gapic/transports/auto_ml_grpc_transport.py @@ -0,0 +1,304 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import google.api_core.grpc_helpers +import google.api_core.operations_v1 + +from google.cloud.automl_v1.proto import service_pb2_grpc + + +class AutoMlGrpcTransport(object): + """gRPC transport class providing stubs for + google.cloud.automl.v1 AutoMl API. + + The transport provides access to the raw gRPC stubs, + which can be used to take advantage of advanced + features of gRPC. + """ + + # The scopes needed to make gRPC calls to all of the methods defined + # in this service. + _OAUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + def __init__( + self, channel=None, credentials=None, address="automl.googleapis.com:443" + ): + """Instantiate the transport class. + + Args: + channel (grpc.Channel): A ``Channel`` instance through + which to make calls. This argument is mutually exclusive + with ``credentials``; providing both will raise an exception. + credentials (google.auth.credentials.Credentials): The + authorization credentials to attach to requests. 
These + credentials identify this application to the service. If none + are specified, the client will attempt to ascertain the + credentials from the environment. + address (str): The address where the service is hosted. + """ + # If both `channel` and `credentials` are specified, raise an + # exception (channels come with credentials baked in already). + if channel is not None and credentials is not None: + raise ValueError( + "The `channel` and `credentials` arguments are mutually " "exclusive." + ) + + # Create the channel. + if channel is None: + channel = self.create_channel( + address=address, + credentials=credentials, + options={ + "grpc.max_send_message_length": -1, + "grpc.max_receive_message_length": -1, + }.items(), + ) + + self._channel = channel + + # gRPC uses objects called "stubs" that are bound to the + # channel and provide a basic method for each RPC. + self._stubs = {"auto_ml_stub": service_pb2_grpc.AutoMlStub(channel)} + + # Because this API includes a method that returns a + # long-running operation (proto: google.longrunning.Operation), + # instantiate an LRO client. + self._operations_client = google.api_core.operations_v1.OperationsClient( + channel + ) + + @classmethod + def create_channel( + cls, address="automl.googleapis.com:443", credentials=None, **kwargs + ): + """Create and return a gRPC channel object. + + Args: + address (str): The host for the channel to use. + credentials (~.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + kwargs (dict): Keyword arguments, which are passed to the + channel creation. + + Returns: + grpc.Channel: A gRPC channel object. 
+ """ + return google.api_core.grpc_helpers.create_channel( + address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs + ) + + @property + def channel(self): + """The gRPC channel used by the transport. + + Returns: + grpc.Channel: A gRPC channel object. + """ + return self._channel + + @property + def create_dataset(self): + """Return the gRPC stub for :meth:`AutoMlClient.create_dataset`. + + Creates a dataset. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["auto_ml_stub"].CreateDataset + + @property + def update_dataset(self): + """Return the gRPC stub for :meth:`AutoMlClient.update_dataset`. + + Updates a dataset. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["auto_ml_stub"].UpdateDataset + + @property + def get_dataset(self): + """Return the gRPC stub for :meth:`AutoMlClient.get_dataset`. + + Gets a dataset. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["auto_ml_stub"].GetDataset + + @property + def list_datasets(self): + """Return the gRPC stub for :meth:`AutoMlClient.list_datasets`. + + Lists datasets in a project. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["auto_ml_stub"].ListDatasets + + @property + def delete_dataset(self): + """Return the gRPC stub for :meth:`AutoMlClient.delete_dataset`. + + Deletes a dataset and all of its contents. Returns empty response in the + ``response`` field when it completes, and ``delete_details`` in the + ``metadata`` field. 
+ + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["auto_ml_stub"].DeleteDataset + + @property + def import_data(self): + """Return the gRPC stub for :meth:`AutoMlClient.import_data`. + + Imports data into a dataset. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["auto_ml_stub"].ImportData + + @property + def export_data(self): + """Return the gRPC stub for :meth:`AutoMlClient.export_data`. + + Exports dataset's data to the provided output location. Returns an empty + response in the ``response`` field when it completes. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["auto_ml_stub"].ExportData + + @property + def create_model(self): + """Return the gRPC stub for :meth:`AutoMlClient.create_model`. + + Creates a model. Returns a Model in the ``response`` field when it + completes. When you create a model, several model evaluations are + created for it: a global evaluation, and one evaluation for each + annotation spec. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["auto_ml_stub"].CreateModel + + @property + def get_model(self): + """Return the gRPC stub for :meth:`AutoMlClient.get_model`. + + Gets a model. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["auto_ml_stub"].GetModel + + @property + def update_model(self): + """Return the gRPC stub for :meth:`AutoMlClient.update_model`. + + Updates a model. 
+ + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["auto_ml_stub"].UpdateModel + + @property + def list_models(self): + """Return the gRPC stub for :meth:`AutoMlClient.list_models`. + + Lists models. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["auto_ml_stub"].ListModels + + @property + def delete_model(self): + """Return the gRPC stub for :meth:`AutoMlClient.delete_model`. + + Deletes a model. Returns ``google.protobuf.Empty`` in the ``response`` + field when it completes, and ``delete_details`` in the ``metadata`` + field. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["auto_ml_stub"].DeleteModel + + @property + def get_model_evaluation(self): + """Return the gRPC stub for :meth:`AutoMlClient.get_model_evaluation`. + + Gets a model evaluation. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["auto_ml_stub"].GetModelEvaluation + + @property + def list_model_evaluations(self): + """Return the gRPC stub for :meth:`AutoMlClient.list_model_evaluations`. + + Lists model evaluations. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. 
+ """ + return self._stubs["auto_ml_stub"].ListModelEvaluations diff --git a/automl/google/cloud/automl_v1/gapic/transports/prediction_service_grpc_transport.py b/automl/google/cloud/automl_v1/gapic/transports/prediction_service_grpc_transport.py new file mode 100644 index 000000000000..9fa5a6f8a12e --- /dev/null +++ b/automl/google/cloud/automl_v1/gapic/transports/prediction_service_grpc_transport.py @@ -0,0 +1,128 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import google.api_core.grpc_helpers + +from google.cloud.automl_v1.proto import prediction_service_pb2_grpc + + +class PredictionServiceGrpcTransport(object): + """gRPC transport class providing stubs for + google.cloud.automl.v1 PredictionService API. + + The transport provides access to the raw gRPC stubs, + which can be used to take advantage of advanced + features of gRPC. + """ + + # The scopes needed to make gRPC calls to all of the methods defined + # in this service. + _OAUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + def __init__( + self, channel=None, credentials=None, address="automl.googleapis.com:443" + ): + """Instantiate the transport class. + + Args: + channel (grpc.Channel): A ``Channel`` instance through + which to make calls. This argument is mutually exclusive + with ``credentials``; providing both will raise an exception. 
+ credentials (google.auth.credentials.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If none + are specified, the client will attempt to ascertain the + credentials from the environment. + address (str): The address where the service is hosted. + """ + # If both `channel` and `credentials` are specified, raise an + # exception (channels come with credentials baked in already). + if channel is not None and credentials is not None: + raise ValueError( + "The `channel` and `credentials` arguments are mutually " "exclusive." + ) + + # Create the channel. + if channel is None: + channel = self.create_channel( + address=address, + credentials=credentials, + options={ + "grpc.max_send_message_length": -1, + "grpc.max_receive_message_length": -1, + }.items(), + ) + + self._channel = channel + + # gRPC uses objects called "stubs" that are bound to the + # channel and provide a basic method for each RPC. + self._stubs = { + "prediction_service_stub": prediction_service_pb2_grpc.PredictionServiceStub( + channel + ) + } + + @classmethod + def create_channel( + cls, address="automl.googleapis.com:443", credentials=None, **kwargs + ): + """Create and return a gRPC channel object. + + Args: + address (str): The host for the channel to use. + credentials (~.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + kwargs (dict): Keyword arguments, which are passed to the + channel creation. + + Returns: + grpc.Channel: A gRPC channel object. + """ + return google.api_core.grpc_helpers.create_channel( + address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs + ) + + @property + def channel(self): + """The gRPC channel used by the transport. + + Returns: + grpc.Channel: A gRPC channel object. 
+ """ + return self._channel + + @property + def predict(self): + """Return the gRPC stub for :meth:`PredictionServiceClient.predict`. + + Perform an online prediction. The prediction result will be directly + returned in the response. Available for following ML problems, and their + expected request payloads: + + - Translation - TextSnippet, content up to 25,000 characters, UTF-8 + encoded. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["prediction_service_stub"].Predict diff --git a/videointelligence/google/cloud/videointelligence_v1beta1/gapic/__init__.py b/automl/google/cloud/automl_v1/proto/__init__.py similarity index 100% rename from videointelligence/google/cloud/videointelligence_v1beta1/gapic/__init__.py rename to automl/google/cloud/automl_v1/proto/__init__.py diff --git a/automl/google/cloud/automl_v1/proto/annotation_payload.proto b/automl/google/cloud/automl_v1/proto/annotation_payload.proto new file mode 100644 index 000000000000..9469c2618a49 --- /dev/null +++ b/automl/google/cloud/automl_v1/proto/annotation_payload.proto @@ -0,0 +1,39 @@ +// Copyright 2019 Google LLC. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + +syntax = "proto3"; + +package google.cloud.automl.v1; + +import "google/cloud/automl/v1/translation.proto"; +import "google/protobuf/any.proto"; +import "google/api/annotations.proto"; + +option go_package = "google.golang.org/genproto/googleapis/cloud/automl/v1;automl"; +option csharp_namespace = "Google.Cloud.AutoML.V1"; +option java_multiple_files = true; +option java_package = "com.google.cloud.automl.v1"; +option php_namespace = "Google\\Cloud\\AutoML\\V1"; +option ruby_package = "Google::Cloud::AutoML::V1"; + +// Contains annotation information that is relevant to AutoML. +message AnnotationPayload { + // Output only . Additional information about the annotation + // specific to the AutoML domain. + oneof detail { + // Annotation details for translation. + TranslationAnnotation translation = 2; + } +} diff --git a/automl/google/cloud/automl_v1/proto/annotation_payload_pb2.py b/automl/google/cloud/automl_v1/proto/annotation_payload_pb2.py new file mode 100644 index 000000000000..9f027e70e315 --- /dev/null +++ b/automl/google/cloud/automl_v1/proto/annotation_payload_pb2.py @@ -0,0 +1,126 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/cloud/automl_v1/proto/annotation_payload.proto + +import sys + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.cloud.automl_v1.proto import ( + translation_pb2 as google_dot_cloud_dot_automl__v1_dot_proto_dot_translation__pb2, +) +from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name="google/cloud/automl_v1/proto/annotation_payload.proto", + package="google.cloud.automl.v1", + syntax="proto3", + serialized_options=_b( + "\n\032com.google.cloud.automl.v1P\001Z labels = 39; +} diff --git a/automl/google/cloud/automl_v1/proto/dataset_pb2.py b/automl/google/cloud/automl_v1/proto/dataset_pb2.py new file mode 100644 index 000000000000..95d16ad188e8 --- /dev/null +++ b/automl/google/cloud/automl_v1/proto/dataset_pb2.py @@ -0,0 +1,352 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/cloud/automl_v1/proto/dataset.proto + +import sys + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.cloud.automl_v1.proto import ( + translation_pb2 as google_dot_cloud_dot_automl__v1_dot_proto_dot_translation__pb2, +) +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name="google/cloud/automl_v1/proto/dataset.proto", + package="google.cloud.automl.v1", + syntax="proto3", + serialized_options=_b( + "\n\032com.google.cloud.automl.v1P\001Z params = 2; +} + +// * For Translation: +// CSV file `translation.csv`, with each line in format: +// ML_USE,GCS_FILE_PATH +// GCS_FILE_PATH leads to a .TSV file which describes examples that have +// given ML_USE, using the following row format per line: +// TEXT_SNIPPET (in source language) \t TEXT_SNIPPET (in target +// language) +// +// `export_data__` +// where will be made +// BigQuery-dataset-name compatible (e.g. most special characters will +// become underscores), and timestamp will be in +// YYYY_MM_DDThh_mm_ss_sssZ "based on ISO-8601" format. In that +// dataset a new table called `primary_table` will be created, and +// filled with precisely the same data as this obtained on import. +message OutputConfig { + // Required. The destination of the output. + oneof destination { + // The Google Cloud Storage location where the output is to be written to. 
+ // For Image Object Detection, Text Extraction, Video Classification and + // Tables, in the given directory a new directory will be created with name: + // export_data-- where + // timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format. All export + // output will be written into that directory. + GcsDestination gcs_destination = 1; + } +} + +// The Google Cloud Storage location for the input content. +message GcsSource { + // Required. Google Cloud Storage URIs to input files, up to 2000 characters + // long. Accepted forms: + // * Full object path, e.g. gs://bucket/directory/object.csv + repeated string input_uris = 1; +} + +// The Google Cloud Storage location where the output is to be written to. +message GcsDestination { + // Required. Google Cloud Storage URI to output directory, up to 2000 + // characters long. + // Accepted forms: + // * Prefix path: gs://bucket/directory + // The requesting user must have write permission to the bucket. + // The directory is created if it doesn't exist. + string output_uri_prefix = 1; +} diff --git a/automl/google/cloud/automl_v1/proto/io_pb2.py b/automl/google/cloud/automl_v1/proto/io_pb2.py new file mode 100644 index 000000000000..6413e9cb34a7 --- /dev/null +++ b/automl/google/cloud/automl_v1/proto/io_pb2.py @@ -0,0 +1,437 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/cloud/automl_v1/proto/io.proto + +import sys + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name="google/cloud/automl_v1/proto/io.proto", + package="google.cloud.automl.v1", + syntax="proto3", + serialized_options=_b( + "\n\032com.google.cloud.automl.v1P\001Z_`` + where will be made BigQuery-dataset-name compatible (e.g. most special + characters will become underscores), and timestamp will be in + YYYY\_MM\_DDThh\_mm\_ss\_sssZ "based on ISO-8601" format. In that + dataset a new table called ``primary_table`` will be created, and filled + with precisely the same data as this obtained on import. + + + Attributes: + destination: + Required. The destination of the output. + gcs_destination: + The Google Cloud Storage location where the output is to be + written to. For Image Object Detection, Text Extraction, Video + Classification and Tables, in the given directory a new + directory will be created with name: export\_data-- where + timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format. All + export output will be written into that directory. + """, + # @@protoc_insertion_point(class_scope:google.cloud.automl.v1.OutputConfig) + ), +) +_sym_db.RegisterMessage(OutputConfig) + +GcsSource = _reflection.GeneratedProtocolMessageType( + "GcsSource", + (_message.Message,), + dict( + DESCRIPTOR=_GCSSOURCE, + __module__="google.cloud.automl_v1.proto.io_pb2", + __doc__="""The Google Cloud Storage location for the input content. + + + Attributes: + input_uris: + Required. 
Google Cloud Storage URIs to input files, up to 2000 + characters long. Accepted forms: \* Full object path, e.g. + gs://bucket/directory/object.csv + """, + # @@protoc_insertion_point(class_scope:google.cloud.automl.v1.GcsSource) + ), +) +_sym_db.RegisterMessage(GcsSource) + +GcsDestination = _reflection.GeneratedProtocolMessageType( + "GcsDestination", + (_message.Message,), + dict( + DESCRIPTOR=_GCSDESTINATION, + __module__="google.cloud.automl_v1.proto.io_pb2", + __doc__="""The Google Cloud Storage location where the output is to be written to. + + + Attributes: + output_uri_prefix: + Required. Google Cloud Storage URI to output directory, up to + 2000 characters long. Accepted forms: \* Prefix path: + gs://bucket/directory The requesting user must have write + permission to the bucket. The directory is created if it + doesn't exist. + """, + # @@protoc_insertion_point(class_scope:google.cloud.automl.v1.GcsDestination) + ), +) +_sym_db.RegisterMessage(GcsDestination) + + +DESCRIPTOR._options = None +_INPUTCONFIG_PARAMSENTRY._options = None +# @@protoc_insertion_point(module_scope) diff --git a/automl/google/cloud/automl_v1/proto/io_pb2_grpc.py b/automl/google/cloud/automl_v1/proto/io_pb2_grpc.py new file mode 100644 index 000000000000..07cb78fe03a9 --- /dev/null +++ b/automl/google/cloud/automl_v1/proto/io_pb2_grpc.py @@ -0,0 +1,2 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +import grpc diff --git a/automl/google/cloud/automl_v1/proto/model.proto b/automl/google/cloud/automl_v1/proto/model.proto new file mode 100644 index 000000000000..5f820b42001e --- /dev/null +++ b/automl/google/cloud/automl_v1/proto/model.proto @@ -0,0 +1,86 @@ +// Copyright 2019 Google LLC. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +syntax = "proto3"; + +package google.cloud.automl.v1; + +import "google/cloud/automl/v1/translation.proto"; +import "google/protobuf/timestamp.proto"; +import "google/api/annotations.proto"; + +option go_package = "google.golang.org/genproto/googleapis/cloud/automl/v1;automl"; +option csharp_namespace = "Google.Cloud.AutoML.V1"; +option java_multiple_files = true; +option java_package = "com.google.cloud.automl.v1"; +option php_namespace = "Google\\Cloud\\AutoML\\V1"; +option ruby_package = "Google::Cloud::AutoML::V1"; + +// API proto representing a trained machine learning model. +message Model { + // Deployment state of the model. + enum DeploymentState { + // Should not be used, an un-set enum has this value by default. + DEPLOYMENT_STATE_UNSPECIFIED = 0; + + // Model is deployed. + DEPLOYED = 1; + + // Model is not deployed. + UNDEPLOYED = 2; + } + + // Required. + // The model metadata that is specific to the problem type. + // Must match the metadata type of the dataset used to train the model. + oneof model_metadata { + // Metadata for translation models. + TranslationModelMetadata translation_model_metadata = 15; + } + + // Output only. Resource name of the model. + // Format: `projects/{project_id}/locations/{location_id}/models/{model_id}` + string name = 1; + + // Required. The name of the model to show in the interface. The name can be + // up to 32 characters long and can consist only of ASCII Latin letters A-Z + // and a-z, underscores + // (_), and ASCII digits 0-9. It must start with a letter. 
+ string display_name = 2; + + // Required. The resource ID of the dataset used to create the model. The dataset must + // come from the same ancestor project and location. + string dataset_id = 3; + + // Output only. Timestamp when the model training finished and can be used for prediction. + google.protobuf.Timestamp create_time = 7; + + // Output only. Timestamp when this model was last updated. + google.protobuf.Timestamp update_time = 11; + + // Output only. Deployment state of the model. A model can only serve + // prediction requests after it gets deployed. + DeploymentState deployment_state = 8; + + // Optional. The labels with user-defined metadata to organize your model. + // + // Label keys and values can be no longer than 64 characters + // (Unicode codepoints), can only contain lowercase letters, numeric + // characters, underscores and dashes. International characters are allowed. + // Label values are optional. Label keys must start with a letter. + // + // See https://goo.gl/xmQnxf for more information on and examples of labels. + map labels = 34; +} diff --git a/automl/google/cloud/automl_v1/proto/model_evaluation.proto b/automl/google/cloud/automl_v1/proto/model_evaluation.proto new file mode 100644 index 000000000000..fe9df1b94887 --- /dev/null +++ b/automl/google/cloud/automl_v1/proto/model_evaluation.proto @@ -0,0 +1,62 @@ +// Copyright 2019 Google LLC. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + +syntax = "proto3"; + +package google.cloud.automl.v1; + +import "google/cloud/automl/v1/translation.proto"; +import "google/protobuf/timestamp.proto"; +import "google/api/annotations.proto"; + +option go_package = "google.golang.org/genproto/googleapis/cloud/automl/v1;automl"; +option csharp_namespace = "Google.Cloud.AutoML.V1"; +option java_multiple_files = true; +option java_package = "com.google.cloud.automl.v1"; +option php_namespace = "Google\\Cloud\\AutoML\\V1"; +option ruby_package = "Google::Cloud::AutoML::V1"; + +// Evaluation results of a model. +message ModelEvaluation { + // Output only. Problem type specific evaluation metrics. + oneof metrics { + // Model evaluation metrics for translation. + TranslationEvaluationMetrics translation_evaluation_metrics = 9; + } + + // Output only. Resource name of the model evaluation. + // Format: + // + // `projects/{project_id}/locations/{location_id}/models/{model_id}/modelEvaluations/{model_evaluation_id}` + string name = 1; + + // Output only. The ID of the annotation spec that the model evaluation applies to. The + // The ID is empty for the overall model evaluation. + string annotation_spec_id = 2; + + // Output only. Timestamp when this model evaluation was created. + google.protobuf.Timestamp create_time = 5; + + // Output only. The number of examples used for model evaluation, i.e. for + // which ground truth from time of model creation is compared against the + // predicted annotations created by the model. + // For overall ModelEvaluation (i.e. with annotation_spec_id not set) this is + // the total number of all examples used for evaluation. + // Otherwise, this is the count of examples that according to the ground + // truth were annotated by the + // + // [annotation_spec_id][google.cloud.automl.v1beta1.ModelEvaluation.annotation_spec_id]. 
+ int32 evaluated_example_count = 6; +} diff --git a/automl/google/cloud/automl_v1/proto/model_evaluation_pb2.py b/automl/google/cloud/automl_v1/proto/model_evaluation_pb2.py new file mode 100644 index 000000000000..ec05252d574e --- /dev/null +++ b/automl/google/cloud/automl_v1/proto/model_evaluation_pb2.py @@ -0,0 +1,220 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/cloud/automl_v1/proto/model_evaluation.proto + +import sys + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.cloud.automl_v1.proto import ( + translation_pb2 as google_dot_cloud_dot_automl__v1_dot_proto_dot_translation__pb2, +) +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name="google/cloud/automl_v1/proto/model_evaluation.proto", + package="google.cloud.automl.v1", + syntax="proto3", + serialized_options=_b( + "\n\032com.google.cloud.automl.v1P\001Z params = 3; +} + +// Response message for +// [PredictionService.Predict][google.cloud.automl.v1.PredictionService.Predict]. +message PredictResponse { + // Prediction result. + // Translation and Text Sentiment will return precisely one payload. + repeated AnnotationPayload payload = 1; + + // Additional domain-specific prediction response metadata. 
+ map metadata = 2; +} diff --git a/automl/google/cloud/automl_v1/proto/prediction_service_pb2.py b/automl/google/cloud/automl_v1/proto/prediction_service_pb2.py new file mode 100644 index 000000000000..9d438e5f321a --- /dev/null +++ b/automl/google/cloud/automl_v1/proto/prediction_service_pb2.py @@ -0,0 +1,422 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/cloud/automl_v1/proto/prediction_service.proto + +import sys + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.cloud.automl_v1.proto import ( + annotation_payload_pb2 as google_dot_cloud_dot_automl__v1_dot_proto_dot_annotation__payload__pb2, +) +from google.cloud.automl_v1.proto import ( + data_items_pb2 as google_dot_cloud_dot_automl__v1_dot_proto_dot_data__items__pb2, +) +from google.cloud.automl_v1.proto import ( + io_pb2 as google_dot_cloud_dot_automl__v1_dot_proto_dot_io__pb2, +) +from google.cloud.automl_v1.proto import ( + operations_pb2 as google_dot_cloud_dot_automl__v1_dot_proto_dot_operations__pb2, +) +from google.longrunning import ( + operations_pb2 as google_dot_longrunning_dot_operations__pb2, +) + + +DESCRIPTOR = _descriptor.FileDescriptor( + name="google/cloud/automl_v1/proto/prediction_service.proto", + package="google.cloud.automl.v1", + syntax="proto3", + serialized_options=_b( + "\n\032com.google.cloud.automl.v1B\026PredictionServiceProtoP\001Z The dataset has + // translation_dataset_metadata. + string filter = 3; + + // Requested page size. 
Server may return fewer results than requested. + // If unspecified, server will pick a default size. + int32 page_size = 4; + + // A token identifying a page of results for the server to return + // Typically obtained via + // [ListDatasetsResponse.next_page_token][google.cloud.automl.v1.ListDatasetsResponse.next_page_token] + // of the previous + // [AutoMl.ListDatasets][google.cloud.automl.v1.AutoMl.ListDatasets] call. + string page_token = 6; +} + +// Response message for +// [AutoMl.ListDatasets][google.cloud.automl.v1.AutoMl.ListDatasets]. +message ListDatasetsResponse { + // The datasets read. + repeated Dataset datasets = 1; + + // A token to retrieve next page of results. + // Pass to + // [ListDatasetsRequest.page_token][google.cloud.automl.v1.ListDatasetsRequest.page_token] + // to obtain that page. + string next_page_token = 2; +} + +// Request message for +// [AutoMl.UpdateDataset][google.cloud.automl.v1.AutoMl.UpdateDataset] +message UpdateDatasetRequest { + // The dataset which replaces the resource on the server. + Dataset dataset = 1; + + // Required. The update mask applies to the resource. + google.protobuf.FieldMask update_mask = 2; +} + +// Request message for +// [AutoMl.DeleteDataset][google.cloud.automl.v1.AutoMl.DeleteDataset]. +message DeleteDatasetRequest { + // The resource name of the dataset to delete. + string name = 1; +} + +// Request message for +// [AutoMl.ImportData][google.cloud.automl.v1.AutoMl.ImportData]. +message ImportDataRequest { + // Required. Dataset name. Dataset must already exist. All imported + // annotations and examples will be added. + string name = 1; + + // Required. The desired input location and its domain specific semantics, + // if any. + InputConfig input_config = 3; +} + +// Request message for +// [AutoMl.ExportData][google.cloud.automl.v1.AutoMl.ExportData]. +message ExportDataRequest { + // Required. The resource name of the dataset. + string name = 1; + + // Required. The desired output location. 
+ OutputConfig output_config = 3; +} + +// Request message for +// [AutoMl.CreateModel][google.cloud.automl.v1.AutoMl.CreateModel]. +message CreateModelRequest { + // Resource name of the parent project where the model is being created. + string parent = 1; + + // The model to create. + Model model = 4; +} + +// Request message for +// [AutoMl.GetModel][google.cloud.automl.v1.AutoMl.GetModel]. +message GetModelRequest { + // Resource name of the model. + string name = 1; +} + +// Request message for +// [AutoMl.ListModels][google.cloud.automl.v1.AutoMl.ListModels]. +message ListModelsRequest { + // Resource name of the project, from which to list the models. + string parent = 1; + + // An expression for filtering the results of the request. + // + // * `model_metadata` - for existence of the case (e.g. + // video_classification_model_metadata:*). + // * `dataset_id` - for = or !=. Some examples of using the filter are: + // + // * `image_classification_model_metadata:*` --> The model has + // image_classification_model_metadata. + // * `dataset_id=5` --> The model was created from a dataset with ID 5. + string filter = 3; + + // Requested page size. + int32 page_size = 4; + + // A token identifying a page of results for the server to return + // Typically obtained via + // [ListModelsResponse.next_page_token][google.cloud.automl.v1.ListModelsResponse.next_page_token] + // of the previous + // [AutoMl.ListModels][google.cloud.automl.v1.AutoMl.ListModels] call. + string page_token = 6; +} + +// Response message for +// [AutoMl.ListModels][google.cloud.automl.v1.AutoMl.ListModels]. +message ListModelsResponse { + // List of models in the requested page. + repeated Model model = 1; + + // A token to retrieve next page of results. + // Pass to + // [ListModelsRequest.page_token][google.cloud.automl.v1.ListModelsRequest.page_token] + // to obtain that page. 
+ string next_page_token = 2; +} + +// Request message for +// [AutoMl.DeleteModel][google.cloud.automl.v1.AutoMl.DeleteModel]. +message DeleteModelRequest { + // Resource name of the model being deleted. + string name = 1; +} + +// Request message for +// [AutoMl.UpdateModel][google.cloud.automl.v1.AutoMl.UpdateModel] +message UpdateModelRequest { + // The model which replaces the resource on the server. + Model model = 1; + + // Required. The update mask applies to the resource. + google.protobuf.FieldMask update_mask = 2; +} + +// Request message for +// [AutoMl.GetModelEvaluation][google.cloud.automl.v1.AutoMl.GetModelEvaluation]. +message GetModelEvaluationRequest { + // Resource name for the model evaluation. + string name = 1; +} + +// Request message for +// [AutoMl.ListModelEvaluations][google.cloud.automl.v1.AutoMl.ListModelEvaluations]. +message ListModelEvaluationsRequest { + // Resource name of the model to list the model evaluations for. + // If modelId is set as "-", this will list model evaluations from across all + // models of the parent location. + string parent = 1; + + // An expression for filtering the results of the request. + // + // * `annotation_spec_id` - for =, != or existence. See example below for + // the last. + // + // Some examples of using the filter are: + // + // * `annotation_spec_id!=4` --> The model evaluation was done for + // annotation spec with ID different than 4. + // * `NOT annotation_spec_id:*` --> The model evaluation was done for + // aggregate of all annotation specs. + string filter = 3; + + // Requested page size. + int32 page_size = 4; + + // A token identifying a page of results for the server to return. + // Typically obtained via + // [ListModelEvaluationsResponse.next_page_token][google.cloud.automl.v1.ListModelEvaluationsResponse.next_page_token] + // of the previous + // [AutoMl.ListModelEvaluations][google.cloud.automl.v1.AutoMl.ListModelEvaluations] + // call. 
+ string page_token = 6; +} + +// Response message for +// [AutoMl.ListModelEvaluations][google.cloud.automl.v1.AutoMl.ListModelEvaluations]. +message ListModelEvaluationsResponse { + // List of model evaluations in the requested page. + repeated ModelEvaluation model_evaluation = 1; + + // A token to retrieve next page of results. + // Pass to the + // [ListModelEvaluationsRequest.page_token][google.cloud.automl.v1.ListModelEvaluationsRequest.page_token] + // field of a new + // [AutoMl.ListModelEvaluations][google.cloud.automl.v1.AutoMl.ListModelEvaluations] + // request to obtain that page. + string next_page_token = 2; +} diff --git a/automl/google/cloud/automl_v1/proto/service_pb2.py b/automl/google/cloud/automl_v1/proto/service_pb2.py new file mode 100644 index 000000000000..093dfb1f072b --- /dev/null +++ b/automl/google/cloud/automl_v1/proto/service_pb2.py @@ -0,0 +1,1693 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/cloud/automl_v1/proto/service.proto + +import sys + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.cloud.automl_v1.proto import ( + annotation_payload_pb2 as google_dot_cloud_dot_automl__v1_dot_proto_dot_annotation__payload__pb2, +) +from google.cloud.automl_v1.proto import ( + dataset_pb2 as google_dot_cloud_dot_automl__v1_dot_proto_dot_dataset__pb2, +) +from google.cloud.automl_v1.proto import ( + io_pb2 as google_dot_cloud_dot_automl__v1_dot_proto_dot_io__pb2, +) +from 
google.cloud.automl_v1.proto import ( + model_pb2 as google_dot_cloud_dot_automl__v1_dot_proto_dot_model__pb2, +) +from google.cloud.automl_v1.proto import ( + model_evaluation_pb2 as google_dot_cloud_dot_automl__v1_dot_proto_dot_model__evaluation__pb2, +) +from google.cloud.automl_v1.proto import ( + operations_pb2 as google_dot_cloud_dot_automl__v1_dot_proto_dot_operations__pb2, +) +from google.longrunning import ( + operations_pb2 as google_dot_longrunning_dot_operations__pb2, +) +from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name="google/cloud/automl_v1/proto/service.proto", + package="google.cloud.automl.v1", + syntax="proto3", + serialized_options=_b( + "\n\032com.google.cloud.automl.v1B\013AutoMlProtoP\001Z The dataset has + translation\_dataset\_metadata. + page_size: + Requested page size. Server may return fewer results than + requested. If unspecified, server will pick a default size. + page_token: + A token identifying a page of results for the server to return + Typically obtained via [ListDatasetsResponse.next\_page\_token + ][google.cloud.automl.v1.ListDatasetsResponse.next\_page\_toke + n] of the previous [AutoMl.ListDatasets][google.cloud.automl.v + 1.AutoMl.ListDatasets] call. + """, + # @@protoc_insertion_point(class_scope:google.cloud.automl.v1.ListDatasetsRequest) + ), +) +_sym_db.RegisterMessage(ListDatasetsRequest) + +ListDatasetsResponse = _reflection.GeneratedProtocolMessageType( + "ListDatasetsResponse", + (_message.Message,), + dict( + DESCRIPTOR=_LISTDATASETSRESPONSE, + __module__="google.cloud.automl_v1.proto.service_pb2", + __doc__="""Response message for + [AutoMl.ListDatasets][google.cloud.automl.v1.AutoMl.ListDatasets]. + + + Attributes: + datasets: + The datasets read. + next_page_token: + A token to retrieve next page of results. 
Pass to [ListDataset + sRequest.page\_token][google.cloud.automl.v1.ListDatasetsReque + st.page\_token] to obtain that page. + """, + # @@protoc_insertion_point(class_scope:google.cloud.automl.v1.ListDatasetsResponse) + ), +) +_sym_db.RegisterMessage(ListDatasetsResponse) + +UpdateDatasetRequest = _reflection.GeneratedProtocolMessageType( + "UpdateDatasetRequest", + (_message.Message,), + dict( + DESCRIPTOR=_UPDATEDATASETREQUEST, + __module__="google.cloud.automl_v1.proto.service_pb2", + __doc__="""Request message for + [AutoMl.UpdateDataset][google.cloud.automl.v1.AutoMl.UpdateDataset] + + + Attributes: + dataset: + The dataset which replaces the resource on the server. + update_mask: + Required. The update mask applies to the resource. + """, + # @@protoc_insertion_point(class_scope:google.cloud.automl.v1.UpdateDatasetRequest) + ), +) +_sym_db.RegisterMessage(UpdateDatasetRequest) + +DeleteDatasetRequest = _reflection.GeneratedProtocolMessageType( + "DeleteDatasetRequest", + (_message.Message,), + dict( + DESCRIPTOR=_DELETEDATASETREQUEST, + __module__="google.cloud.automl_v1.proto.service_pb2", + __doc__="""Request message for + [AutoMl.DeleteDataset][google.cloud.automl.v1.AutoMl.DeleteDataset]. + + + Attributes: + name: + The resource name of the dataset to delete. + """, + # @@protoc_insertion_point(class_scope:google.cloud.automl.v1.DeleteDatasetRequest) + ), +) +_sym_db.RegisterMessage(DeleteDatasetRequest) + +ImportDataRequest = _reflection.GeneratedProtocolMessageType( + "ImportDataRequest", + (_message.Message,), + dict( + DESCRIPTOR=_IMPORTDATAREQUEST, + __module__="google.cloud.automl_v1.proto.service_pb2", + __doc__="""Request message for + [AutoMl.ImportData][google.cloud.automl.v1.AutoMl.ImportData]. + + + Attributes: + name: + Required. Dataset name. Dataset must already exist. All + imported annotations and examples will be added. + input_config: + Required. The desired input location and its domain specific + semantics, if any. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.automl.v1.ImportDataRequest) + ), +) +_sym_db.RegisterMessage(ImportDataRequest) + +ExportDataRequest = _reflection.GeneratedProtocolMessageType( + "ExportDataRequest", + (_message.Message,), + dict( + DESCRIPTOR=_EXPORTDATAREQUEST, + __module__="google.cloud.automl_v1.proto.service_pb2", + __doc__="""Request message for + [AutoMl.ExportData][google.cloud.automl.v1.AutoMl.ExportData]. + + + Attributes: + name: + Required. The resource name of the dataset. + output_config: + Required. The desired output location. + """, + # @@protoc_insertion_point(class_scope:google.cloud.automl.v1.ExportDataRequest) + ), +) +_sym_db.RegisterMessage(ExportDataRequest) + +CreateModelRequest = _reflection.GeneratedProtocolMessageType( + "CreateModelRequest", + (_message.Message,), + dict( + DESCRIPTOR=_CREATEMODELREQUEST, + __module__="google.cloud.automl_v1.proto.service_pb2", + __doc__="""Request message for + [AutoMl.CreateModel][google.cloud.automl.v1.AutoMl.CreateModel]. + + + Attributes: + parent: + Resource name of the parent project where the model is being + created. + model: + The model to create. + """, + # @@protoc_insertion_point(class_scope:google.cloud.automl.v1.CreateModelRequest) + ), +) +_sym_db.RegisterMessage(CreateModelRequest) + +GetModelRequest = _reflection.GeneratedProtocolMessageType( + "GetModelRequest", + (_message.Message,), + dict( + DESCRIPTOR=_GETMODELREQUEST, + __module__="google.cloud.automl_v1.proto.service_pb2", + __doc__="""Request message for + [AutoMl.GetModel][google.cloud.automl.v1.AutoMl.GetModel]. + + + Attributes: + name: + Resource name of the model. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.automl.v1.GetModelRequest) + ), +) +_sym_db.RegisterMessage(GetModelRequest) + +ListModelsRequest = _reflection.GeneratedProtocolMessageType( + "ListModelsRequest", + (_message.Message,), + dict( + DESCRIPTOR=_LISTMODELSREQUEST, + __module__="google.cloud.automl_v1.proto.service_pb2", + __doc__="""Request message for + [AutoMl.ListModels][google.cloud.automl.v1.AutoMl.ListModels]. + + + Attributes: + parent: + Resource name of the project, from which to list the models. + filter: + An expression for filtering the results of the request. - + ``model_metadata`` - for existence of the case (e.g. + video\_classification\_model\_metadata:\*). - ``dataset_id`` + - for = or !=. Some examples of using the filter are: - + ``image_classification_model_metadata:*`` --> The model has + image\_classification\_model\_metadata. - ``dataset_id=5`` + --> The model was created from a dataset with ID 5. + page_size: + Requested page size. + page_token: + A token identifying a page of results for the server to return + Typically obtained via [ListModelsResponse.next\_page\_token][ + google.cloud.automl.v1.ListModelsResponse.next\_page\_token] + of the previous + [AutoMl.ListModels][google.cloud.automl.v1.AutoMl.ListModels] + call. + """, + # @@protoc_insertion_point(class_scope:google.cloud.automl.v1.ListModelsRequest) + ), +) +_sym_db.RegisterMessage(ListModelsRequest) + +ListModelsResponse = _reflection.GeneratedProtocolMessageType( + "ListModelsResponse", + (_message.Message,), + dict( + DESCRIPTOR=_LISTMODELSRESPONSE, + __module__="google.cloud.automl_v1.proto.service_pb2", + __doc__="""Response message for + [AutoMl.ListModels][google.cloud.automl.v1.AutoMl.ListModels]. + + + Attributes: + model: + List of models in the requested page. + next_page_token: + A token to retrieve next page of results. Pass to [ListModelsR + equest.page\_token][google.cloud.automl.v1.ListModelsRequest.p + age\_token] to obtain that page. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.automl.v1.ListModelsResponse) + ), +) +_sym_db.RegisterMessage(ListModelsResponse) + +DeleteModelRequest = _reflection.GeneratedProtocolMessageType( + "DeleteModelRequest", + (_message.Message,), + dict( + DESCRIPTOR=_DELETEMODELREQUEST, + __module__="google.cloud.automl_v1.proto.service_pb2", + __doc__="""Request message for + [AutoMl.DeleteModel][google.cloud.automl.v1.AutoMl.DeleteModel]. + + + Attributes: + name: + Resource name of the model being deleted. + """, + # @@protoc_insertion_point(class_scope:google.cloud.automl.v1.DeleteModelRequest) + ), +) +_sym_db.RegisterMessage(DeleteModelRequest) + +UpdateModelRequest = _reflection.GeneratedProtocolMessageType( + "UpdateModelRequest", + (_message.Message,), + dict( + DESCRIPTOR=_UPDATEMODELREQUEST, + __module__="google.cloud.automl_v1.proto.service_pb2", + __doc__="""Request message for + [AutoMl.UpdateModel][google.cloud.automl.v1.AutoMl.UpdateModel] + + + Attributes: + model: + The model which replaces the resource on the server. + update_mask: + Required. The update mask applies to the resource. + """, + # @@protoc_insertion_point(class_scope:google.cloud.automl.v1.UpdateModelRequest) + ), +) +_sym_db.RegisterMessage(UpdateModelRequest) + +GetModelEvaluationRequest = _reflection.GeneratedProtocolMessageType( + "GetModelEvaluationRequest", + (_message.Message,), + dict( + DESCRIPTOR=_GETMODELEVALUATIONREQUEST, + __module__="google.cloud.automl_v1.proto.service_pb2", + __doc__="""Request message for + [AutoMl.GetModelEvaluation][google.cloud.automl.v1.AutoMl.GetModelEvaluation]. + + + Attributes: + name: + Resource name for the model evaluation. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.automl.v1.GetModelEvaluationRequest) + ), +) +_sym_db.RegisterMessage(GetModelEvaluationRequest) + +ListModelEvaluationsRequest = _reflection.GeneratedProtocolMessageType( + "ListModelEvaluationsRequest", + (_message.Message,), + dict( + DESCRIPTOR=_LISTMODELEVALUATIONSREQUEST, + __module__="google.cloud.automl_v1.proto.service_pb2", + __doc__="""Request message for + [AutoMl.ListModelEvaluations][google.cloud.automl.v1.AutoMl.ListModelEvaluations]. + + + Attributes: + parent: + Resource name of the model to list the model evaluations for. + If modelId is set as "-", this will list model evaluations + from across all models of the parent location. + filter: + An expression for filtering the results of the request. - + ``annotation_spec_id`` - for =, != or existence. See example + below for the last. Some examples of using the filter are: + - ``annotation_spec_id!=4`` --> The model evaluation was done + for annotation spec with ID different than 4. - ``NOT + annotation_spec_id:*`` --> The model evaluation was done for + aggregate of all annotation specs. + page_size: + Requested page size. + page_token: + A token identifying a page of results for the server to + return. Typically obtained via [ListModelEvaluationsResponse.n + ext\_page\_token][google.cloud.automl.v1.ListModelEvaluationsR + esponse.next\_page\_token] of the previous [AutoMl.ListModelEv + aluations][google.cloud.automl.v1.AutoMl.ListModelEvaluations] + call. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.automl.v1.ListModelEvaluationsRequest) + ), +) +_sym_db.RegisterMessage(ListModelEvaluationsRequest) + +ListModelEvaluationsResponse = _reflection.GeneratedProtocolMessageType( + "ListModelEvaluationsResponse", + (_message.Message,), + dict( + DESCRIPTOR=_LISTMODELEVALUATIONSRESPONSE, + __module__="google.cloud.automl_v1.proto.service_pb2", + __doc__="""Response message for + [AutoMl.ListModelEvaluations][google.cloud.automl.v1.AutoMl.ListModelEvaluations]. + + + Attributes: + model_evaluation: + List of model evaluations in the requested page. + next_page_token: + A token to retrieve next page of results. Pass to the [ListMod + elEvaluationsRequest.page\_token][google.cloud.automl.v1.ListM + odelEvaluationsRequest.page\_token] field of a new [AutoMl.Lis + tModelEvaluations][google.cloud.automl.v1.AutoMl.ListModelEval + uations] request to obtain that page. + """, + # @@protoc_insertion_point(class_scope:google.cloud.automl.v1.ListModelEvaluationsResponse) + ), +) +_sym_db.RegisterMessage(ListModelEvaluationsResponse) + + +DESCRIPTOR._options = None + +_AUTOML = _descriptor.ServiceDescriptor( + name="AutoMl", + full_name="google.cloud.automl.v1.AutoMl", + file=DESCRIPTOR, + index=0, + serialized_options=_b( + "\312A\025automl.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" + ), + serialized_start=1871, + serialized_end=4179, + methods=[ + _descriptor.MethodDescriptor( + name="CreateDataset", + full_name="google.cloud.automl.v1.AutoMl.CreateDataset", + index=0, + containing_service=None, + input_type=_CREATEDATASETREQUEST, + output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, + serialized_options=_b( + '\202\323\344\223\0027",/v1/{parent=projects/*/locations/*}/datasets:\007dataset' + ), + ), + _descriptor.MethodDescriptor( + name="GetDataset", + full_name="google.cloud.automl.v1.AutoMl.GetDataset", + index=1, + containing_service=None, + input_type=_GETDATASETREQUEST, + 
output_type=google_dot_cloud_dot_automl__v1_dot_proto_dot_dataset__pb2._DATASET, + serialized_options=_b( + "\202\323\344\223\002.\022,/v1/{name=projects/*/locations/*/datasets/*}" + ), + ), + _descriptor.MethodDescriptor( + name="ListDatasets", + full_name="google.cloud.automl.v1.AutoMl.ListDatasets", + index=2, + containing_service=None, + input_type=_LISTDATASETSREQUEST, + output_type=_LISTDATASETSRESPONSE, + serialized_options=_b( + "\202\323\344\223\002.\022,/v1/{parent=projects/*/locations/*}/datasets" + ), + ), + _descriptor.MethodDescriptor( + name="UpdateDataset", + full_name="google.cloud.automl.v1.AutoMl.UpdateDataset", + index=3, + containing_service=None, + input_type=_UPDATEDATASETREQUEST, + output_type=google_dot_cloud_dot_automl__v1_dot_proto_dot_dataset__pb2._DATASET, + serialized_options=_b( + "\202\323\344\223\002?24/v1/{dataset.name=projects/*/locations/*/datasets/*}:\007dataset" + ), + ), + _descriptor.MethodDescriptor( + name="DeleteDataset", + full_name="google.cloud.automl.v1.AutoMl.DeleteDataset", + index=4, + containing_service=None, + input_type=_DELETEDATASETREQUEST, + output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, + serialized_options=_b( + "\202\323\344\223\002.*,/v1/{name=projects/*/locations/*/datasets/*}" + ), + ), + _descriptor.MethodDescriptor( + name="ImportData", + full_name="google.cloud.automl.v1.AutoMl.ImportData", + index=5, + containing_service=None, + input_type=_IMPORTDATAREQUEST, + output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, + serialized_options=_b( + '\202\323\344\223\002<"7/v1/{name=projects/*/locations/*/datasets/*}:importData:\001*' + ), + ), + _descriptor.MethodDescriptor( + name="ExportData", + full_name="google.cloud.automl.v1.AutoMl.ExportData", + index=6, + containing_service=None, + input_type=_EXPORTDATAREQUEST, + output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, + serialized_options=_b( + 
'\202\323\344\223\002<"7/v1/{name=projects/*/locations/*/datasets/*}:exportData:\001*' + ), + ), + _descriptor.MethodDescriptor( + name="CreateModel", + full_name="google.cloud.automl.v1.AutoMl.CreateModel", + index=7, + containing_service=None, + input_type=_CREATEMODELREQUEST, + output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, + serialized_options=_b( + '\202\323\344\223\0023"*/v1/{parent=projects/*/locations/*}/models:\005model' + ), + ), + _descriptor.MethodDescriptor( + name="GetModel", + full_name="google.cloud.automl.v1.AutoMl.GetModel", + index=8, + containing_service=None, + input_type=_GETMODELREQUEST, + output_type=google_dot_cloud_dot_automl__v1_dot_proto_dot_model__pb2._MODEL, + serialized_options=_b( + "\202\323\344\223\002,\022*/v1/{name=projects/*/locations/*/models/*}" + ), + ), + _descriptor.MethodDescriptor( + name="ListModels", + full_name="google.cloud.automl.v1.AutoMl.ListModels", + index=9, + containing_service=None, + input_type=_LISTMODELSREQUEST, + output_type=_LISTMODELSRESPONSE, + serialized_options=_b( + "\202\323\344\223\002,\022*/v1/{parent=projects/*/locations/*}/models" + ), + ), + _descriptor.MethodDescriptor( + name="DeleteModel", + full_name="google.cloud.automl.v1.AutoMl.DeleteModel", + index=10, + containing_service=None, + input_type=_DELETEMODELREQUEST, + output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, + serialized_options=_b( + "\202\323\344\223\002,**/v1/{name=projects/*/locations/*/models/*}" + ), + ), + _descriptor.MethodDescriptor( + name="UpdateModel", + full_name="google.cloud.automl.v1.AutoMl.UpdateModel", + index=11, + containing_service=None, + input_type=_UPDATEMODELREQUEST, + output_type=google_dot_cloud_dot_automl__v1_dot_proto_dot_model__pb2._MODEL, + serialized_options=_b( + "\202\323\344\223\002920/v1/{model.name=projects/*/locations/*/models/*}:\005model" + ), + ), + _descriptor.MethodDescriptor( + name="GetModelEvaluation", + 
full_name="google.cloud.automl.v1.AutoMl.GetModelEvaluation", + index=12, + containing_service=None, + input_type=_GETMODELEVALUATIONREQUEST, + output_type=google_dot_cloud_dot_automl__v1_dot_proto_dot_model__evaluation__pb2._MODELEVALUATION, + serialized_options=_b( + "\202\323\344\223\002?\022=/v1/{name=projects/*/locations/*/models/*/modelEvaluations/*}" + ), + ), + _descriptor.MethodDescriptor( + name="ListModelEvaluations", + full_name="google.cloud.automl.v1.AutoMl.ListModelEvaluations", + index=13, + containing_service=None, + input_type=_LISTMODELEVALUATIONSREQUEST, + output_type=_LISTMODELEVALUATIONSRESPONSE, + serialized_options=_b( + "\202\323\344\223\002?\022=/v1/{parent=projects/*/locations/*/models/*}/modelEvaluations" + ), + ), + ], +) +_sym_db.RegisterServiceDescriptor(_AUTOML) + +DESCRIPTOR.services_by_name["AutoMl"] = _AUTOML + +# @@protoc_insertion_point(module_scope) diff --git a/automl/google/cloud/automl_v1/proto/service_pb2_grpc.py b/automl/google/cloud/automl_v1/proto/service_pb2_grpc.py new file mode 100644 index 000000000000..dd6beb5ca397 --- /dev/null +++ b/automl/google/cloud/automl_v1/proto/service_pb2_grpc.py @@ -0,0 +1,322 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +import grpc + +from google.cloud.automl_v1.proto import ( + dataset_pb2 as google_dot_cloud_dot_automl__v1_dot_proto_dot_dataset__pb2, +) +from google.cloud.automl_v1.proto import ( + model_evaluation_pb2 as google_dot_cloud_dot_automl__v1_dot_proto_dot_model__evaluation__pb2, +) +from google.cloud.automl_v1.proto import ( + model_pb2 as google_dot_cloud_dot_automl__v1_dot_proto_dot_model__pb2, +) +from google.cloud.automl_v1.proto import ( + service_pb2 as google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2, +) +from google.longrunning import ( + operations_pb2 as google_dot_longrunning_dot_operations__pb2, +) + + +class AutoMlStub(object): + """AutoML Server API. + + The resource names are assigned by the server. 
+ The server never reuses names that it has created after the resources with + those names are deleted. + + An ID of a resource is the last element of the item's resource name. For + `projects/{project_id}/locations/{location_id}/datasets/{dataset_id}`, then + the id for the item is `{dataset_id}`. + + Currently the only supported `location_id` is "us-central1". + + On any input that is documented to expect a string parameter in + snake_case or kebab-case, either of those cases is accepted. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.CreateDataset = channel.unary_unary( + "/google.cloud.automl.v1.AutoMl/CreateDataset", + request_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.CreateDatasetRequest.SerializeToString, + response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, + ) + self.GetDataset = channel.unary_unary( + "/google.cloud.automl.v1.AutoMl/GetDataset", + request_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.GetDatasetRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_dataset__pb2.Dataset.FromString, + ) + self.ListDatasets = channel.unary_unary( + "/google.cloud.automl.v1.AutoMl/ListDatasets", + request_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.ListDatasetsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.ListDatasetsResponse.FromString, + ) + self.UpdateDataset = channel.unary_unary( + "/google.cloud.automl.v1.AutoMl/UpdateDataset", + request_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.UpdateDatasetRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_dataset__pb2.Dataset.FromString, + ) + self.DeleteDataset = channel.unary_unary( + "/google.cloud.automl.v1.AutoMl/DeleteDataset", + 
request_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.DeleteDatasetRequest.SerializeToString, + response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, + ) + self.ImportData = channel.unary_unary( + "/google.cloud.automl.v1.AutoMl/ImportData", + request_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.ImportDataRequest.SerializeToString, + response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, + ) + self.ExportData = channel.unary_unary( + "/google.cloud.automl.v1.AutoMl/ExportData", + request_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.ExportDataRequest.SerializeToString, + response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, + ) + self.CreateModel = channel.unary_unary( + "/google.cloud.automl.v1.AutoMl/CreateModel", + request_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.CreateModelRequest.SerializeToString, + response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, + ) + self.GetModel = channel.unary_unary( + "/google.cloud.automl.v1.AutoMl/GetModel", + request_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.GetModelRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_model__pb2.Model.FromString, + ) + self.ListModels = channel.unary_unary( + "/google.cloud.automl.v1.AutoMl/ListModels", + request_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.ListModelsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.ListModelsResponse.FromString, + ) + self.DeleteModel = channel.unary_unary( + "/google.cloud.automl.v1.AutoMl/DeleteModel", + request_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.DeleteModelRequest.SerializeToString, + 
response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, + ) + self.UpdateModel = channel.unary_unary( + "/google.cloud.automl.v1.AutoMl/UpdateModel", + request_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.UpdateModelRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_model__pb2.Model.FromString, + ) + self.GetModelEvaluation = channel.unary_unary( + "/google.cloud.automl.v1.AutoMl/GetModelEvaluation", + request_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.GetModelEvaluationRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_model__evaluation__pb2.ModelEvaluation.FromString, + ) + self.ListModelEvaluations = channel.unary_unary( + "/google.cloud.automl.v1.AutoMl/ListModelEvaluations", + request_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.ListModelEvaluationsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.ListModelEvaluationsResponse.FromString, + ) + + +class AutoMlServicer(object): + """AutoML Server API. + + The resource names are assigned by the server. + The server never reuses names that it has created after the resources with + those names are deleted. + + An ID of a resource is the last element of the item's resource name. For + `projects/{project_id}/locations/{location_id}/datasets/{dataset_id}`, then + the id for the item is `{dataset_id}`. + + Currently the only supported `location_id` is "us-central1". + + On any input that is documented to expect a string parameter in + snake_case or kebab-case, either of those cases is accepted. + """ + + def CreateDataset(self, request, context): + """Creates a dataset. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def GetDataset(self, request, context): + """Gets a dataset. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def ListDatasets(self, request, context): + """Lists datasets in a project. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def UpdateDataset(self, request, context): + """Updates a dataset. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def DeleteDataset(self, request, context): + """Deletes a dataset and all of its contents. + Returns empty response in the + [response][google.longrunning.Operation.response] field when it completes, + and `delete_details` in the + [metadata][google.longrunning.Operation.metadata] field. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def ImportData(self, request, context): + """Imports data into a dataset. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def ExportData(self, request, context): + """Exports dataset's data to the provided output location. + Returns an empty response in the + [response][google.longrunning.Operation.response] field when it completes. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def CreateModel(self, request, context): + """Creates a model. 
+ Returns a Model in the [response][google.longrunning.Operation.response] + field when it completes. + When you create a model, several model evaluations are created for it: + a global evaluation, and one evaluation for each annotation spec. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def GetModel(self, request, context): + """Gets a model. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def ListModels(self, request, context): + """Lists models. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def DeleteModel(self, request, context): + """Deletes a model. + Returns `google.protobuf.Empty` in the + [response][google.longrunning.Operation.response] field when it completes, + and `delete_details` in the + [metadata][google.longrunning.Operation.metadata] field. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def UpdateModel(self, request, context): + """Updates a model. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def GetModelEvaluation(self, request, context): + """Gets a model evaluation. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def ListModelEvaluations(self, request, context): + """Lists model evaluations. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + +def add_AutoMlServicer_to_server(servicer, server): + rpc_method_handlers = { + "CreateDataset": grpc.unary_unary_rpc_method_handler( + servicer.CreateDataset, + request_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.CreateDatasetRequest.FromString, + response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ), + "GetDataset": grpc.unary_unary_rpc_method_handler( + servicer.GetDataset, + request_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.GetDatasetRequest.FromString, + response_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_dataset__pb2.Dataset.SerializeToString, + ), + "ListDatasets": grpc.unary_unary_rpc_method_handler( + servicer.ListDatasets, + request_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.ListDatasetsRequest.FromString, + response_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.ListDatasetsResponse.SerializeToString, + ), + "UpdateDataset": grpc.unary_unary_rpc_method_handler( + servicer.UpdateDataset, + request_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.UpdateDatasetRequest.FromString, + response_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_dataset__pb2.Dataset.SerializeToString, + ), + "DeleteDataset": grpc.unary_unary_rpc_method_handler( + servicer.DeleteDataset, + request_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.DeleteDatasetRequest.FromString, + response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ), + "ImportData": grpc.unary_unary_rpc_method_handler( + servicer.ImportData, + request_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.ImportDataRequest.FromString, + 
response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ), + "ExportData": grpc.unary_unary_rpc_method_handler( + servicer.ExportData, + request_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.ExportDataRequest.FromString, + response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ), + "CreateModel": grpc.unary_unary_rpc_method_handler( + servicer.CreateModel, + request_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.CreateModelRequest.FromString, + response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ), + "GetModel": grpc.unary_unary_rpc_method_handler( + servicer.GetModel, + request_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.GetModelRequest.FromString, + response_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_model__pb2.Model.SerializeToString, + ), + "ListModels": grpc.unary_unary_rpc_method_handler( + servicer.ListModels, + request_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.ListModelsRequest.FromString, + response_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.ListModelsResponse.SerializeToString, + ), + "DeleteModel": grpc.unary_unary_rpc_method_handler( + servicer.DeleteModel, + request_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.DeleteModelRequest.FromString, + response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ), + "UpdateModel": grpc.unary_unary_rpc_method_handler( + servicer.UpdateModel, + request_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.UpdateModelRequest.FromString, + response_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_model__pb2.Model.SerializeToString, + ), + "GetModelEvaluation": grpc.unary_unary_rpc_method_handler( + servicer.GetModelEvaluation, + 
request_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.GetModelEvaluationRequest.FromString, + response_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_model__evaluation__pb2.ModelEvaluation.SerializeToString, + ), + "ListModelEvaluations": grpc.unary_unary_rpc_method_handler( + servicer.ListModelEvaluations, + request_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.ListModelEvaluationsRequest.FromString, + response_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.ListModelEvaluationsResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + "google.cloud.automl.v1.AutoMl", rpc_method_handlers + ) + server.add_generic_rpc_handlers((generic_handler,)) diff --git a/automl/google/cloud/automl_v1/proto/translation.proto b/automl/google/cloud/automl_v1/proto/translation.proto new file mode 100644 index 000000000000..bc449fe79f58 --- /dev/null +++ b/automl/google/cloud/automl_v1/proto/translation.proto @@ -0,0 +1,70 @@ +// Copyright 2019 Google LLC. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + +syntax = "proto3"; + +package google.cloud.automl.v1; + +import "google/cloud/automl/v1/data_items.proto"; +import "google/api/annotations.proto"; + +option go_package = "google.golang.org/genproto/googleapis/cloud/automl/v1;automl"; +option csharp_namespace = "Google.Cloud.AutoML.V1"; +option java_multiple_files = true; +option java_outer_classname = "TranslationProto"; +option java_package = "com.google.cloud.automl.v1"; +option php_namespace = "Google\\Cloud\\AutoML\\V1"; +option ruby_package = "Google::Cloud::AutoML::V1"; + +// Dataset metadata that is specific to translation. +message TranslationDatasetMetadata { + // Required. The BCP-47 language code of the source language. + string source_language_code = 1; + + // Required. The BCP-47 language code of the target language. + string target_language_code = 2; +} + +// Evaluation metrics for the dataset. +message TranslationEvaluationMetrics { + // Output only. BLEU score. + double bleu_score = 1; + + // Output only. BLEU score for base model. + double base_bleu_score = 2; +} + +// Model metadata that is specific to translation. +message TranslationModelMetadata { + // The resource name of the model to use as a baseline to train the custom + // model. If unset, we use the default base model provided by Google + // Translate. Format: + // `projects/{project_id}/locations/{location_id}/models/{model_id}` + string base_model = 1; + + // Output only. Inferred from the dataset. + // The source languge (The BCP-47 language code) that is used for training. + string source_language_code = 2; + + // Output only. The target languge (The BCP-47 language code) that is used for + // training. + string target_language_code = 3; +} + +// Annotation details specific to translation. +message TranslationAnnotation { + // Output only . The translated content. 
+ TextSnippet translated_content = 1; +} diff --git a/automl/google/cloud/automl_v1/proto/translation_pb2.py b/automl/google/cloud/automl_v1/proto/translation_pb2.py new file mode 100644 index 000000000000..4542dbc539d5 --- /dev/null +++ b/automl/google/cloud/automl_v1/proto/translation_pb2.py @@ -0,0 +1,370 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/cloud/automl_v1/proto/translation.proto + +import sys + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.cloud.automl_v1.proto import ( + data_items_pb2 as google_dot_cloud_dot_automl__v1_dot_proto_dot_data__items__pb2, +) +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name="google/cloud/automl_v1/proto/translation.proto", + package="google.cloud.automl.v1", + syntax="proto3", + serialized_options=_b( + "\n\032com.google.cloud.automl.v1B\020TranslationProtoP\001Z\n\x0cinput_config\x18\x03 \x01(\x0b\x32(.google.cloud.automl.v1beta1.InputConfig"c\n\x11\x45xportDataRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12@\n\routput_config\x18\x03 \x01(\x0b\x32).google.cloud.automl.v1beta1.OutputConfig"(\n\x18GetAnnotationSpecRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"S\n\x13GetTableSpecRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12.\n\nfield_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"\x8e\x01\n\x15ListTableSpecsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12.\n\nfield_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\x12\x0e\n\x06\x66ilter\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05\x12\x12\n\npage_token\x18\x06 
\x01(\t"n\n\x16ListTableSpecsResponse\x12;\n\x0btable_specs\x18\x01 \x03(\x0b\x32&.google.cloud.automl.v1beta1.TableSpec\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\x85\x01\n\x16UpdateTableSpecRequest\x12:\n\ntable_spec\x18\x01 \x01(\x0b\x32&.google.cloud.automl.v1beta1.TableSpec\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"T\n\x14GetColumnSpecRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12.\n\nfield_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"\x8f\x01\n\x16ListColumnSpecsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12.\n\nfield_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\x12\x0e\n\x06\x66ilter\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05\x12\x12\n\npage_token\x18\x06 \x01(\t"q\n\x17ListColumnSpecsResponse\x12=\n\x0c\x63olumn_specs\x18\x01 \x03(\x0b\x32\'.google.cloud.automl.v1beta1.ColumnSpec\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\x88\x01\n\x17UpdateColumnSpecRequest\x12<\n\x0b\x63olumn_spec\x18\x01 \x01(\x0b\x32\'.google.cloud.automl.v1beta1.ColumnSpec\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"W\n\x12\x43reateModelRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x31\n\x05model\x18\x04 \x01(\x0b\x32".google.cloud.automl.v1beta1.Model"\x1f\n\x0fGetModelRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"Z\n\x11ListModelsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05\x12\x12\n\npage_token\x18\x06 \x01(\t"`\n\x12ListModelsResponse\x12\x31\n\x05model\x18\x01 \x03(\x0b\x32".google.cloud.automl.v1beta1.Model\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t""\n\x12\x44\x65leteModelRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"\xc6\x01\n\x12\x44\x65ployModelRequest\x12\x84\x01\n0image_object_detection_model_deployment_metadata\x18\x02 \x01(\x0b\x32H.google.cloud.automl.v1beta1.ImageObjectDetectionModelDeploymentMetadataH\x00\x12\x0c\n\x04name\x18\x01 
\x01(\tB\x1b\n\x19model_deployment_metadata"$\n\x14UndeployModelRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"o\n\x12\x45xportModelRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12K\n\routput_config\x18\x03 \x01(\x0b\x32\x34.google.cloud.automl.v1beta1.ModelExportOutputConfig"\x87\x01\n\x1e\x45xportEvaluatedExamplesRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12W\n\routput_config\x18\x03 \x01(\x0b\x32@.google.cloud.automl.v1beta1.ExportEvaluatedExamplesOutputConfig")\n\x19GetModelEvaluationRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"d\n\x1bListModelEvaluationsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05\x12\x12\n\npage_token\x18\x06 \x01(\t"\x7f\n\x1cListModelEvaluationsResponse\x12\x46\n\x10model_evaluation\x18\x01 \x03(\x0b\x32,.google.cloud.automl.v1beta1.ModelEvaluation\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t2\xd9"\n\x06\x41utoMl\x12\xac\x01\n\rCreateDataset\x12\x31.google.cloud.automl.v1beta1.CreateDatasetRequest\x1a$.google.cloud.automl.v1beta1.Dataset"B\x82\xd3\xe4\x93\x02<"1/v1beta1/{parent=projects/*/locations/*}/datasets:\x07\x64\x61taset\x12\x9d\x01\n\nGetDataset\x12..google.cloud.automl.v1beta1.GetDatasetRequest\x1a$.google.cloud.automl.v1beta1.Dataset"9\x82\xd3\xe4\x93\x02\x33\x12\x31/v1beta1/{name=projects/*/locations/*/datasets/*}\x12\xae\x01\n\x0cListDatasets\x12\x30.google.cloud.automl.v1beta1.ListDatasetsRequest\x1a\x31.google.cloud.automl.v1beta1.ListDatasetsResponse"9\x82\xd3\xe4\x93\x02\x33\x12\x31/v1beta1/{parent=projects/*/locations/*}/datasets\x12\xb4\x01\n\rUpdateDataset\x12\x31.google.cloud.automl.v1beta1.UpdateDatasetRequest\x1a$.google.cloud.automl.v1beta1.Dataset"J\x82\xd3\xe4\x93\x02\x44\x32\x39/v1beta1/{dataset.name=projects/*/locations/*/datasets/*}:\x07\x64\x61taset\x12\x9c\x01\n\rDeleteDataset\x12\x31.google.cloud.automl.v1beta1.DeleteDatasetRequest\x1a\x1d.google.longrunning.Operation"9\x82\xd3\xe4\x93\x02\x33*1/v1beta1/{name=projects/*/locations/*/datasets/*}\x1
2\xa4\x01\n\nImportData\x12..google.cloud.automl.v1beta1.ImportDataRequest\x1a\x1d.google.longrunning.Operation"G\x82\xd3\xe4\x93\x02\x41"/v1beta1/{name=projects/*/locations/*/datasets/*/tableSpecs/*}\x12\xc1\x01\n\x0eListTableSpecs\x12\x32.google.cloud.automl.v1beta1.ListTableSpecsRequest\x1a\x33.google.cloud.automl.v1beta1.ListTableSpecsResponse"F\x82\xd3\xe4\x93\x02@\x12>/v1beta1/{parent=projects/*/locations/*/datasets/*}/tableSpecs\x12\xcd\x01\n\x0fUpdateTableSpec\x12\x33.google.cloud.automl.v1beta1.UpdateTableSpecRequest\x1a&.google.cloud.automl.v1beta1.TableSpec"]\x82\xd3\xe4\x93\x02W2I/v1beta1/{table_spec.name=projects/*/locations/*/datasets/*/tableSpecs/*}:\ntable_spec\x12\xc1\x01\n\rGetColumnSpec\x12\x31.google.cloud.automl.v1beta1.GetColumnSpecRequest\x1a\'.google.cloud.automl.v1beta1.ColumnSpec"T\x82\xd3\xe4\x93\x02N\x12L/v1beta1/{name=projects/*/locations/*/datasets/*/tableSpecs/*/columnSpecs/*}\x12\xd2\x01\n\x0fListColumnSpecs\x12\x33.google.cloud.automl.v1beta1.ListColumnSpecsRequest\x1a\x34.google.cloud.automl.v1beta1.ListColumnSpecsResponse"T\x82\xd3\xe4\x93\x02N\x12L/v1beta1/{parent=projects/*/locations/*/datasets/*/tableSpecs/*}/columnSpecs\x12\xe0\x01\n\x10UpdateColumnSpec\x12\x34.google.cloud.automl.v1beta1.UpdateColumnSpecRequest\x1a\'.google.cloud.automl.v1beta1.ColumnSpec"m\x82\xd3\xe4\x93\x02g2X/v1beta1/{column_spec.name=projects/*/locations/*/datasets/*/tableSpecs/*/columnSpecs/*}:\x0b\x63olumn_spec\x12\x9d\x01\n\x0b\x43reateModel\x12/.google.cloud.automl.v1beta1.CreateModelRequest\x1a\x1d.google.longrunning.Operation">\x82\xd3\xe4\x93\x02\x38"//v1beta1/{parent=projects/*/locations/*}/models:\x05model\x12\x95\x01\n\x08GetModel\x12,.google.cloud.automl.v1beta1.GetModelRequest\x1a".google.cloud.automl.v1beta1.Model"7\x82\xd3\xe4\x93\x02\x31\x12//v1beta1/{name=projects/*/locations/*/models/*}\x12\xa6\x01\n\nListModels\x12..google.cloud.automl.v1beta1.ListModelsRequest\x1a/.google.cloud.automl.v1beta1.ListModelsResponse"7\x82\xd3\xe4\x93\x02\x31
\x12//v1beta1/{parent=projects/*/locations/*}/models\x12\x96\x01\n\x0b\x44\x65leteModel\x12/.google.cloud.automl.v1beta1.DeleteModelRequest\x1a\x1d.google.longrunning.Operation"7\x82\xd3\xe4\x93\x02\x31*//v1beta1/{name=projects/*/locations/*/models/*}\x12\xa0\x01\n\x0b\x44\x65ployModel\x12/.google.cloud.automl.v1beta1.DeployModelRequest\x1a\x1d.google.longrunning.Operation"A\x82\xd3\xe4\x93\x02;"6/v1beta1/{name=projects/*/locations/*/models/*}:deploy:\x01*\x12\xa6\x01\n\rUndeployModel\x12\x31.google.cloud.automl.v1beta1.UndeployModelRequest\x1a\x1d.google.longrunning.Operation"C\x82\xd3\xe4\x93\x02="8/v1beta1/{name=projects/*/locations/*/models/*}:undeploy:\x01*\x12\xa0\x01\n\x0b\x45xportModel\x12/.google.cloud.automl.v1beta1.ExportModelRequest\x1a\x1d.google.longrunning.Operation"A\x82\xd3\xe4\x93\x02;"6/v1beta1/{name=projects/*/locations/*/models/*}:export:\x01*\x12\xc9\x01\n\x17\x45xportEvaluatedExamples\x12;.google.cloud.automl.v1beta1.ExportEvaluatedExamplesRequest\x1a\x1d.google.longrunning.Operation"R\x82\xd3\xe4\x93\x02L"G/v1beta1/{name=projects/*/locations/*/models/*}:exportEvaluatedExamples:\x01*\x12\xc6\x01\n\x12GetModelEvaluation\x12\x36.google.cloud.automl.v1beta1.GetModelEvaluationRequest\x1a,.google.cloud.automl.v1beta1.ModelEvaluation"J\x82\xd3\xe4\x93\x02\x44\x12\x42/v1beta1/{name=projects/*/locations/*/models/*/modelEvaluations/*}\x12\xd7\x01\n\x14ListModelEvaluations\x12\x38.google.cloud.automl.v1beta1.ListModelEvaluationsRequest\x1a\x39.google.cloud.automl.v1beta1.ListModelEvaluationsResponse"J\x82\xd3\xe4\x93\x02\x44\x12\x42/v1beta1/{parent=projects/*/locations/*/models/*}/modelEvaluations\x1aI\xca\x41\x15\x61utoml.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB\xb2\x01\n\x1f\x63om.google.cloud.automl.v1beta1B\x0b\x41utoMlProtoP\x01ZAgoogle.golang.org/genproto/googleapis/cloud/automl/v1beta1;automl\xca\x02\x1bGoogle\\Cloud\\AutoMl\\V1beta1\xea\x02\x1eGoogle::Cloud::AutoML::V1beta1b\x06proto3' + 
'\n/google/cloud/automl_v1beta1/proto/service.proto\x12\x1bgoogle.cloud.automl.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a:google/cloud/automl_v1beta1/proto/annotation_payload.proto\x1a\x37google/cloud/automl_v1beta1/proto/annotation_spec.proto\x1a\x33google/cloud/automl_v1beta1/proto/column_spec.proto\x1a/google/cloud/automl_v1beta1/proto/dataset.proto\x1a-google/cloud/automl_v1beta1/proto/image.proto\x1a*google/cloud/automl_v1beta1/proto/io.proto\x1a-google/cloud/automl_v1beta1/proto/model.proto\x1a\x38google/cloud/automl_v1beta1/proto/model_evaluation.proto\x1a\x32google/cloud/automl_v1beta1/proto/operations.proto\x1a\x32google/cloud/automl_v1beta1/proto/table_spec.proto\x1a#google/longrunning/operations.proto\x1a google/protobuf/field_mask.proto\x1a\x17google/api/client.proto"]\n\x14\x43reateDatasetRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x35\n\x07\x64\x61taset\x18\x02 \x01(\x0b\x32$.google.cloud.automl.v1beta1.Dataset"!\n\x11GetDatasetRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"\\\n\x13ListDatasetsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05\x12\x12\n\npage_token\x18\x06 \x01(\t"g\n\x14ListDatasetsResponse\x12\x36\n\x08\x64\x61tasets\x18\x01 \x03(\x0b\x32$.google.cloud.automl.v1beta1.Dataset\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"~\n\x14UpdateDatasetRequest\x12\x35\n\x07\x64\x61taset\x18\x01 \x01(\x0b\x32$.google.cloud.automl.v1beta1.Dataset\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"$\n\x14\x44\x65leteDatasetRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"a\n\x11ImportDataRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12>\n\x0cinput_config\x18\x03 \x01(\x0b\x32(.google.cloud.automl.v1beta1.InputConfig"c\n\x11\x45xportDataRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12@\n\routput_config\x18\x03 \x01(\x0b\x32).google.cloud.automl.v1beta1.OutputConfig"(\n\x18GetAnnotationSpecRequest\x12\x0c\n\x04name\x18\x01 
\x01(\t"S\n\x13GetTableSpecRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12.\n\nfield_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"\x8e\x01\n\x15ListTableSpecsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12.\n\nfield_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\x12\x0e\n\x06\x66ilter\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05\x12\x12\n\npage_token\x18\x06 \x01(\t"n\n\x16ListTableSpecsResponse\x12;\n\x0btable_specs\x18\x01 \x03(\x0b\x32&.google.cloud.automl.v1beta1.TableSpec\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\x85\x01\n\x16UpdateTableSpecRequest\x12:\n\ntable_spec\x18\x01 \x01(\x0b\x32&.google.cloud.automl.v1beta1.TableSpec\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"T\n\x14GetColumnSpecRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12.\n\nfield_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"\x8f\x01\n\x16ListColumnSpecsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12.\n\nfield_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\x12\x0e\n\x06\x66ilter\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05\x12\x12\n\npage_token\x18\x06 \x01(\t"q\n\x17ListColumnSpecsResponse\x12=\n\x0c\x63olumn_specs\x18\x01 \x03(\x0b\x32\'.google.cloud.automl.v1beta1.ColumnSpec\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\x88\x01\n\x17UpdateColumnSpecRequest\x12<\n\x0b\x63olumn_spec\x18\x01 \x01(\x0b\x32\'.google.cloud.automl.v1beta1.ColumnSpec\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"W\n\x12\x43reateModelRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x31\n\x05model\x18\x04 \x01(\x0b\x32".google.cloud.automl.v1beta1.Model"\x1f\n\x0fGetModelRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"Z\n\x11ListModelsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05\x12\x12\n\npage_token\x18\x06 \x01(\t"`\n\x12ListModelsResponse\x12\x31\n\x05model\x18\x01 
\x03(\x0b\x32".google.cloud.automl.v1beta1.Model\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t""\n\x12\x44\x65leteModelRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"\xca\x02\n\x12\x44\x65ployModelRequest\x12\x84\x01\n0image_object_detection_model_deployment_metadata\x18\x02 \x01(\x0b\x32H.google.cloud.automl.v1beta1.ImageObjectDetectionModelDeploymentMetadataH\x00\x12\x81\x01\n.image_classification_model_deployment_metadata\x18\x04 \x01(\x0b\x32G.google.cloud.automl.v1beta1.ImageClassificationModelDeploymentMetadataH\x00\x12\x0c\n\x04name\x18\x01 \x01(\tB\x1b\n\x19model_deployment_metadata"$\n\x14UndeployModelRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"o\n\x12\x45xportModelRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12K\n\routput_config\x18\x03 \x01(\x0b\x32\x34.google.cloud.automl.v1beta1.ModelExportOutputConfig"\x87\x01\n\x1e\x45xportEvaluatedExamplesRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12W\n\routput_config\x18\x03 \x01(\x0b\x32@.google.cloud.automl.v1beta1.ExportEvaluatedExamplesOutputConfig")\n\x19GetModelEvaluationRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"d\n\x1bListModelEvaluationsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05\x12\x12\n\npage_token\x18\x06 \x01(\t"\x7f\n\x1cListModelEvaluationsResponse\x12\x46\n\x10model_evaluation\x18\x01 \x03(\x0b\x32,.google.cloud.automl.v1beta1.ModelEvaluation\x12\x17\n\x0fnext_page_token\x18\x02 
\x01(\t2\xd9"\n\x06\x41utoMl\x12\xac\x01\n\rCreateDataset\x12\x31.google.cloud.automl.v1beta1.CreateDatasetRequest\x1a$.google.cloud.automl.v1beta1.Dataset"B\x82\xd3\xe4\x93\x02<"1/v1beta1/{parent=projects/*/locations/*}/datasets:\x07\x64\x61taset\x12\x9d\x01\n\nGetDataset\x12..google.cloud.automl.v1beta1.GetDatasetRequest\x1a$.google.cloud.automl.v1beta1.Dataset"9\x82\xd3\xe4\x93\x02\x33\x12\x31/v1beta1/{name=projects/*/locations/*/datasets/*}\x12\xae\x01\n\x0cListDatasets\x12\x30.google.cloud.automl.v1beta1.ListDatasetsRequest\x1a\x31.google.cloud.automl.v1beta1.ListDatasetsResponse"9\x82\xd3\xe4\x93\x02\x33\x12\x31/v1beta1/{parent=projects/*/locations/*}/datasets\x12\xb4\x01\n\rUpdateDataset\x12\x31.google.cloud.automl.v1beta1.UpdateDatasetRequest\x1a$.google.cloud.automl.v1beta1.Dataset"J\x82\xd3\xe4\x93\x02\x44\x32\x39/v1beta1/{dataset.name=projects/*/locations/*/datasets/*}:\x07\x64\x61taset\x12\x9c\x01\n\rDeleteDataset\x12\x31.google.cloud.automl.v1beta1.DeleteDatasetRequest\x1a\x1d.google.longrunning.Operation"9\x82\xd3\xe4\x93\x02\x33*1/v1beta1/{name=projects/*/locations/*/datasets/*}\x12\xa4\x01\n\nImportData\x12..google.cloud.automl.v1beta1.ImportDataRequest\x1a\x1d.google.longrunning.Operation"G\x82\xd3\xe4\x93\x02\x41"/v1beta1/{name=projects/*/locations/*/datasets/*/tableSpecs/*}\x12\xc1\x01\n\x0eListTableSpecs\x12\x32.google.cloud.automl.v1beta1.ListTableSpecsRequest\x1a\x33.google.cloud.automl.v1beta1.ListTableSpecsResponse"F\x82\xd3\xe4\x93\x02@\x12>/v1beta1/{parent=projects/*/locations/*/datasets/*}/tableSpecs\x12\xcd\x01\n\x0fUpdateTableSpec\x12\x33.google.cloud.automl.v1beta1.UpdateTableSpecRequest\x1a&.google.cloud.automl.v1beta1.TableSpec"]\x82\xd3\xe4\x93\x02W2I/v1beta1/{table_spec.name=projects/*/locations/*/datasets/*/tableSpecs/*}:\ntable_spec\x12\xc1\x01\n\rGetColumnSpec\x12\x31.google.cloud.automl.v1beta1.GetColumnSpecRequest\x1a\'.google.cloud.automl.v1beta1.ColumnSpec"T\x82\xd3\xe4\x93\x02N\x12L/v1beta1/{name=projects/*/locations/*/datas
ets/*/tableSpecs/*/columnSpecs/*}\x12\xd2\x01\n\x0fListColumnSpecs\x12\x33.google.cloud.automl.v1beta1.ListColumnSpecsRequest\x1a\x34.google.cloud.automl.v1beta1.ListColumnSpecsResponse"T\x82\xd3\xe4\x93\x02N\x12L/v1beta1/{parent=projects/*/locations/*/datasets/*/tableSpecs/*}/columnSpecs\x12\xe0\x01\n\x10UpdateColumnSpec\x12\x34.google.cloud.automl.v1beta1.UpdateColumnSpecRequest\x1a\'.google.cloud.automl.v1beta1.ColumnSpec"m\x82\xd3\xe4\x93\x02g2X/v1beta1/{column_spec.name=projects/*/locations/*/datasets/*/tableSpecs/*/columnSpecs/*}:\x0b\x63olumn_spec\x12\x9d\x01\n\x0b\x43reateModel\x12/.google.cloud.automl.v1beta1.CreateModelRequest\x1a\x1d.google.longrunning.Operation">\x82\xd3\xe4\x93\x02\x38"//v1beta1/{parent=projects/*/locations/*}/models:\x05model\x12\x95\x01\n\x08GetModel\x12,.google.cloud.automl.v1beta1.GetModelRequest\x1a".google.cloud.automl.v1beta1.Model"7\x82\xd3\xe4\x93\x02\x31\x12//v1beta1/{name=projects/*/locations/*/models/*}\x12\xa6\x01\n\nListModels\x12..google.cloud.automl.v1beta1.ListModelsRequest\x1a/.google.cloud.automl.v1beta1.ListModelsResponse"7\x82\xd3\xe4\x93\x02\x31\x12//v1beta1/{parent=projects/*/locations/*}/models\x12\x96\x01\n\x0b\x44\x65leteModel\x12/.google.cloud.automl.v1beta1.DeleteModelRequest\x1a\x1d.google.longrunning.Operation"7\x82\xd3\xe4\x93\x02\x31*//v1beta1/{name=projects/*/locations/*/models/*}\x12\xa0\x01\n\x0b\x44\x65ployModel\x12/.google.cloud.automl.v1beta1.DeployModelRequest\x1a\x1d.google.longrunning.Operation"A\x82\xd3\xe4\x93\x02;"6/v1beta1/{name=projects/*/locations/*/models/*}:deploy:\x01*\x12\xa6\x01\n\rUndeployModel\x12\x31.google.cloud.automl.v1beta1.UndeployModelRequest\x1a\x1d.google.longrunning.Operation"C\x82\xd3\xe4\x93\x02="8/v1beta1/{name=projects/*/locations/*/models/*}:undeploy:\x01*\x12\xa0\x01\n\x0b\x45xportModel\x12/.google.cloud.automl.v1beta1.ExportModelRequest\x1a\x1d.google.longrunning.Operation"A\x82\xd3\xe4\x93\x02;"6/v1beta1/{name=projects/*/locations/*/models/*}:export:\x01*\x12\xc9\x0
1\n\x17\x45xportEvaluatedExamples\x12;.google.cloud.automl.v1beta1.ExportEvaluatedExamplesRequest\x1a\x1d.google.longrunning.Operation"R\x82\xd3\xe4\x93\x02L"G/v1beta1/{name=projects/*/locations/*/models/*}:exportEvaluatedExamples:\x01*\x12\xc6\x01\n\x12GetModelEvaluation\x12\x36.google.cloud.automl.v1beta1.GetModelEvaluationRequest\x1a,.google.cloud.automl.v1beta1.ModelEvaluation"J\x82\xd3\xe4\x93\x02\x44\x12\x42/v1beta1/{name=projects/*/locations/*/models/*/modelEvaluations/*}\x12\xd7\x01\n\x14ListModelEvaluations\x12\x38.google.cloud.automl.v1beta1.ListModelEvaluationsRequest\x1a\x39.google.cloud.automl.v1beta1.ListModelEvaluationsResponse"J\x82\xd3\xe4\x93\x02\x44\x12\x42/v1beta1/{parent=projects/*/locations/*/models/*}/modelEvaluations\x1aI\xca\x41\x15\x61utoml.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB\xb2\x01\n\x1f\x63om.google.cloud.automl.v1beta1B\x0b\x41utoMlProtoP\x01ZAgoogle.golang.org/genproto/googleapis/cloud/automl/v1beta1;automl\xca\x02\x1bGoogle\\Cloud\\AutoMl\\V1beta1\xea\x02\x1eGoogle::Cloud::AutoML::V1beta1b\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, @@ -1451,10 +1451,28 @@ serialized_options=None, file=DESCRIPTOR, ), + _descriptor.FieldDescriptor( + name="image_classification_model_deployment_metadata", + full_name="google.cloud.automl.v1beta1.DeployModelRequest.image_classification_model_deployment_metadata", + index=1, + number=4, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), _descriptor.FieldDescriptor( name="name", full_name="google.cloud.automl.v1beta1.DeployModelRequest.name", - index=1, + index=2, number=1, type=9, cpp_type=9, @@ -1487,7 +1505,7 @@ ) ], serialized_start=2775, - serialized_end=2973, + serialized_end=3105, ) @@ -1525,8 +1543,8 @@ syntax="proto3", 
extension_ranges=[], oneofs=[], - serialized_start=2975, - serialized_end=3011, + serialized_start=3107, + serialized_end=3143, ) @@ -1582,8 +1600,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3013, - serialized_end=3124, + serialized_start=3145, + serialized_end=3256, ) @@ -1639,8 +1657,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3127, - serialized_end=3262, + serialized_start=3259, + serialized_end=3394, ) @@ -1678,8 +1696,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3264, - serialized_end=3305, + serialized_start=3396, + serialized_end=3437, ) @@ -1771,8 +1789,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3307, - serialized_end=3407, + serialized_start=3439, + serialized_end=3539, ) @@ -1828,8 +1846,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3409, - serialized_end=3536, + serialized_start=3541, + serialized_end=3668, ) _CREATEDATASETREQUEST.fields_by_name[ @@ -1907,6 +1925,11 @@ ].message_type = ( google_dot_cloud_dot_automl__v1beta1_dot_proto_dot_image__pb2._IMAGEOBJECTDETECTIONMODELDEPLOYMENTMETADATA ) +_DEPLOYMODELREQUEST.fields_by_name[ + "image_classification_model_deployment_metadata" +].message_type = ( + google_dot_cloud_dot_automl__v1beta1_dot_proto_dot_image__pb2._IMAGECLASSIFICATIONMODELDEPLOYMENTMETADATA +) _DEPLOYMODELREQUEST.oneofs_by_name["model_deployment_metadata"].fields.append( _DEPLOYMODELREQUEST.fields_by_name[ "image_object_detection_model_deployment_metadata" @@ -1915,6 +1938,12 @@ _DEPLOYMODELREQUEST.fields_by_name[ "image_object_detection_model_deployment_metadata" ].containing_oneof = _DEPLOYMODELREQUEST.oneofs_by_name["model_deployment_metadata"] +_DEPLOYMODELREQUEST.oneofs_by_name["model_deployment_metadata"].fields.append( + _DEPLOYMODELREQUEST.fields_by_name["image_classification_model_deployment_metadata"] +) +_DEPLOYMODELREQUEST.fields_by_name[ + 
"image_classification_model_deployment_metadata" +].containing_oneof = _DEPLOYMODELREQUEST.oneofs_by_name["model_deployment_metadata"] _EXPORTMODELREQUEST.fields_by_name[ "output_config" ].message_type = ( @@ -2501,6 +2530,8 @@ The per-domain specific deployment parameters. image_object_detection_model_deployment_metadata: Model deployment metadata specific to Image Object Detection. + image_classification_model_deployment_metadata: + Model deployment metadata specific to Image Classification. name: Resource name of the model to deploy. """, @@ -2666,8 +2697,8 @@ serialized_options=_b( "\312A\025automl.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" ), - serialized_start=3539, - serialized_end=7980, + serialized_start=3671, + serialized_end=8112, methods=[ _descriptor.MethodDescriptor( name="CreateDataset", diff --git a/automl/google/cloud/automl_v1beta1/tables/gcs_client.py b/automl/google/cloud/automl_v1beta1/tables/gcs_client.py index 980fd40b1f90..e5de17c3b0c9 100644 --- a/automl/google/cloud/automl_v1beta1/tables/gcs_client.py +++ b/automl/google/cloud/automl_v1beta1/tables/gcs_client.py @@ -41,7 +41,7 @@ class GcsClient(object): """Uploads Pandas DataFrame to a bucket in Google Cloud Storage.""" - def __init__(self, bucket_name=None, client=None, credentials=None): + def __init__(self, bucket_name=None, client=None, credentials=None, project=None): """Constructor. Args: @@ -54,6 +54,10 @@ def __init__(self, bucket_name=None, client=None, credentials=None): credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. + project (Optional[str]): The project ID of the GCP project to + attach to the underlying storage client. If none is specified, + the client will attempt to ascertain the credentials from the + environment. 
""" if storage is None: raise ImportError(_STORAGE_REQUIRED) @@ -61,7 +65,7 @@ def __init__(self, bucket_name=None, client=None, credentials=None): if client is not None: self.client = client elif credentials is not None: - self.client = storage.Client(credentials=credentials) + self.client = storage.Client(credentials=credentials, project=project) else: self.client = storage.Client() @@ -79,7 +83,7 @@ def ensure_bucket_exists(self, project, region): Save the created bucket's name and reuse this for future requests. Args: - project (str): The project that stores the bucket. + project (str): The ID of the project that stores the bucket. region (str): The region of the bucket. Returns: diff --git a/automl/google/cloud/automl_v1beta1/tables/tables_client.py b/automl/google/cloud/automl_v1beta1/tables/tables_client.py index 80e301fbd6bd..7ecd1e6f4e63 100644 --- a/automl/google/cloud/automl_v1beta1/tables/tables_client.py +++ b/automl/google/cloud/automl_v1beta1/tables/tables_client.py @@ -42,6 +42,7 @@ def __init__( self, project=None, region="us-central1", + credentials=None, client=None, prediction_client=None, gcs_client=None, @@ -60,11 +61,11 @@ def __init__( ... Args: - project (Optional[string]): The project all future calls will - default to. Most methods take `project` as an optional - parameter, and can override your choice of `project` supplied - here. - region (Optional[string]): The region all future calls will + project (Optional[str]): The project ID of the GCP project all + future calls will default to. Most methods take `project` as an + optional parameter, and can override your choice of `project` + supplied here. + region (Optional[str]): The region all future calls will default to. Most methods take `region` as an optional parameter, and can override your choice of `region` supplied here. Note, only `us-central1` is supported to-date. 
@@ -106,20 +107,21 @@ def __init__( if client is None: self.auto_ml_client = gapic.auto_ml_client.AutoMlClient( - client_info=client_info_, **kwargs + credentials=credentials, client_info=client_info_, **kwargs ) else: self.auto_ml_client = client if prediction_client is None: self.prediction_client = gapic.prediction_service_client.PredictionServiceClient( - client_info=client_info_, **kwargs + credentials=credentials, client_info=client_info_, **kwargs ) else: self.prediction_client = prediction_client self.project = project self.region = region + self.credentials = credentials self.gcs_client = gcs_client def __lookup_by_display_name(self, object_type, items, display_name): @@ -406,7 +408,7 @@ def __type_code_to_value_type(self, type_code, value): else: raise ValueError("Unknown type_code: {}".format(type_code)) - def __ensure_gcs_client_is_initialized(self, credentials=None): + def __ensure_gcs_client_is_initialized(self, credentials, project): """Checks if GCS client is initialized. Initializes it if not. Args: @@ -415,9 +417,14 @@ def __ensure_gcs_client_is_initialized(self, credentials=None): credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. + project (str): The ID of the project to use with the GCS + client. If none is specified, the client will attempt to + ascertain the credentials from the environment. """ if self.gcs_client is None: - self.gcs_client = gcs_client.GcsClient(credentials=credentials) + self.gcs_client = gcs_client.GcsClient( + project=project, credentials=credentials + ) def list_datasets(self, project=None, region=None, **kwargs): """List all datasets in a particular project and region. @@ -439,12 +446,12 @@ def list_datasets(self, project=None, region=None, **kwargs): ... Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. 
Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + datasets. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. @@ -488,21 +495,21 @@ def get_dataset( >>> Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + dataset. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. - dataset_name (Optional[string]): + dataset_name (Optional[str]): This is the fully-qualified name generated by the AutoML API for this dataset. This is not to be confused with the human-assigned `dataset_display_name` that is provided when creating a dataset. Either `dataset_name` or `dataset_display_name` must be provided. - dataset_display_name (Optional[string]): + dataset_display_name (Optional[str]): This is the name you provided for the dataset when first creating it. Either `dataset_name` or `dataset_display_name` must be provided. 
@@ -550,15 +557,15 @@ def create_dataset( >>> Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that will own the + dataset. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. - dataset_display_name (string): + dataset_display_name (str): A human-readable name to refer to this dataset by. Returns: @@ -604,19 +611,19 @@ def delete_dataset( >>> Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + dataset. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. - dataset_display_name (Optional[string]): + dataset_display_name (Optional[str]): The human-readable name given to the dataset you want to delete. This must be supplied if `dataset` or `dataset_name` are not supplied. 
- dataset_name (Optional[string]): + dataset_name (Optional[str]): The AutoML-assigned name given to the dataset you want to delete. This must be supplied if `dataset_display_name` or `dataset` are not supplied. @@ -626,8 +633,9 @@ def delete_dataset( supplied. Returns: - A :class:`~google.cloud.automl_v1beta1.types._OperationFuture` - instance. + google.api_core.operation.Operation: + An operation future that can be used to check for + completion synchronously or asynchronously. Raises: google.api_core.exceptions.GoogleAPICallError: If the request @@ -679,7 +687,7 @@ def import_data( ... >>> d = client.create_dataset(dataset_display_name='my_dataset') >>> - >>> client.import_data(dataset=d, + >>> response = client.import_data(dataset=d, ... gcs_input_uris='gs://cloud-ml-tables-data/bank-marketing.csv') ... >>> def callback(operation_future): @@ -689,12 +697,12 @@ def import_data( >>> Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + dataset. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. credentials (Optional[google.auth.credentials.Credentials]): The @@ -702,11 +710,11 @@ def import_data( credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. 
- dataset_display_name (Optional[string]): + dataset_display_name (Optional[str]): The human-readable name given to the dataset you want to import data into. This must be supplied if `dataset` or `dataset_name` are not supplied. - dataset_name (Optional[string]): + dataset_name (Optional[str]): The AutoML-assigned name given to the dataset you want to import data into. This must be supplied if `dataset_display_name` or `dataset` are not supplied. @@ -720,19 +728,20 @@ def import_data( `gs://{project}-automl-tables-staging/{uploaded_csv_name}` This parameter must be supplied if neither `gcs_input_uris` nor `bigquery_input_uri` is supplied. - gcs_input_uris (Optional[Union[string, Sequence[string]]]): + gcs_input_uris (Optional[Union[str, Sequence[str]]]): Either a single `gs://..` prefixed URI, or a list of URIs referring to GCS-hosted CSV files containing the data to import. This must be supplied if neither `bigquery_input_uri` nor `pandas_dataframe` is supplied. - bigquery_input_uri (Optional[string]): + bigquery_input_uri (Optional[str]): A URI pointing to the BigQuery table containing the data to import. This must be supplied if neither `gcs_input_uris` nor `pandas_dataframe` is supplied. Returns: - A :class:`~google.cloud.automl_v1beta1.types._OperationFuture` - instance. + google.api_core.operation.Operation: + An operation future that can be used to check for + completion synchronously or asynchronously. 
Raises: google.api_core.exceptions.GoogleAPICallError: If the request @@ -753,7 +762,10 @@ def import_data( request = {} if pandas_dataframe is not None: - self.__ensure_gcs_client_is_initialized(credentials) + project = project or self.project + region = region or self.region + credentials = credentials or self.credentials + self.__ensure_gcs_client_is_initialized(credentials, project) self.gcs_client.ensure_bucket_exists(project, region) gcs_input_uri = self.gcs_client.upload_pandas_dataframe(pandas_dataframe) request = {"gcs_source": {"input_uris": [gcs_input_uri]}} @@ -796,7 +808,7 @@ def export_data( ... >>> d = client.create_dataset(dataset_display_name='my_dataset') >>> - >>> client.export_data(dataset=d, + >>> response = client.export_data(dataset=d, ... gcs_output_uri_prefix='gs://cloud-ml-tables-data/bank-marketing.csv') ... >>> def callback(operation_future): @@ -806,19 +818,19 @@ def export_data( >>> Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + dataset. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. - dataset_display_name (Optional[string]): + dataset_display_name (Optional[str]): The human-readable name given to the dataset you want to export data from. This must be supplied if `dataset` or `dataset_name` are not supplied. 
- dataset_name (Optional[string]): + dataset_name (Optional[str]): The AutoML-assigned name given to the dataset you want to export data from. This must be supplied if `dataset_display_name` or `dataset` are not supplied. @@ -826,16 +838,17 @@ def export_data( The `Dataset` instance you want to export data from. This must be supplied if `dataset_display_name` or `dataset_name` are not supplied. - gcs_output_uri_prefix (Optional[Union[string, Sequence[string]]]): + gcs_output_uri_prefix (Optional[Union[str, Sequence[str]]]): A single `gs://..` prefixed URI to export to. This must be supplied if `bigquery_output_uri` is not. - bigquery_output_uri (Optional[string]): + bigquery_output_uri (Optional[str]): A URI pointing to the BigQuery table containing the data to export. This must be supplied if `gcs_output_uri_prefix` is not. Returns: - A :class:`~google.cloud.automl_v1beta1.types._OperationFuture` - instance. + google.api_core.operation.Operation: + An operation future that can be used to check for + completion synchronously or asynchronously. Raises: google.api_core.exceptions.GoogleAPICallError: If the request @@ -883,15 +896,15 @@ def get_table_spec(self, table_spec_name, project=None, region=None, **kwargs): >>> Args: - table_spec_name (string): + table_spec_name (str): This is the fully-qualified name generated by the AutoML API for this table spec. - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + table. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. 
+ region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. @@ -933,19 +946,19 @@ def list_table_specs( ... Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + dataset. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. - dataset_display_name (Optional[string]): + dataset_display_name (Optional[str]): The human-readable name given to the dataset you want to read specs from. This must be supplied if `dataset` or `dataset_name` are not supplied. - dataset_name (Optional[string]): + dataset_name (Optional[str]): The AutoML-assigned name given to the dataset you want to read specs from. This must be supplied if `dataset_display_name` or `dataset` are not supplied. @@ -995,15 +1008,15 @@ def get_column_spec(self, column_spec_name, project=None, region=None, **kwargs) >>> Args: - column_spec_name (string): + column_spec_name (str): This is the fully-qualified name generated by the AutoML API for this column spec. - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + column. 
If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. @@ -1047,29 +1060,29 @@ def list_column_specs( ... Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + columns. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. - table_spec_name (Optional[string]): + table_spec_name (Optional[str]): The AutoML-assigned name for the table whose specs you want to read. If not supplied, the client can determine this name from a source `Dataset` object. table_spec_index (Optional[int]): If no `table_spec_name` was provided, we use this index to determine which table to read column specs from. - dataset_display_name (Optional[string]): + dataset_display_name (Optional[str]): The human-readable name given to the dataset you want to read specs from. If no `table_spec_name` is supplied, this will be used together with `table_spec_index` to infer the name of table to read specs from. This must be supplied if `table_spec_name`, `dataset` or `dataset_name` are not supplied. 
- dataset_name (Optional[string]): + dataset_name (Optional[str]): The AutoML-assigned name given to the dataset you want to read specs from. If no `table_spec_name` is supplied, this will be used together with `table_spec_index` to infer the name of @@ -1145,50 +1158,57 @@ def update_column_spec( ... Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): - If you have initialized the client with a value for `region` it - will be used if this parameter is not supplied. - column_spec_name (Optional[string]): - The name AutoML-assigned name for the column you want to - update. - column_spec_display_name (Optional[string]): - The human-readable name of the column you want to update. If - this is supplied in place of `column_spec_name`, you also need - to provide either a way to lookup the source dataset (using one - of the `dataset*` kwargs), or the `table_spec_name` of the - table this column belongs to. - table_spec_name (Optional[string]): - The AutoML-assigned name for the table whose specs you want to - update. If not supplied, the client can determine this name - from a source `Dataset` object. - table_spec_index (Optional[int]): - If no `table_spec_name` was provided, we use this index to - determine which table to update column specs on. - dataset_display_name (Optional[string]): + dataset (Optional[Dataset]): + The `Dataset` instance you want to update specs on. If no + `table_spec_name` is supplied, this will be used together with + `table_spec_index` to infer the name of table to update specs + on. This must be supplied if `table_spec_name`, `dataset_name` + or `dataset_display_name` are not supplied. + dataset_display_name (Optional[str]): The human-readable name given to the dataset you want to update specs on. 
If no `table_spec_name` is supplied, this will be used together with `table_spec_index` to infer the name of table to update specs on. This must be supplied if `table_spec_name`, `dataset` or `dataset_name` are not supplied. - dataset_name (Optional[string]): + dataset_name (Optional[str]): The AutoML-assigned name given to the dataset you want to update specs one. If no `table_spec_name` is supplied, this will be used together with `table_spec_index` to infer the name of table to update specs on. This must be supplied if `table_spec_name`, `dataset` or `dataset_display_name` are not supplied. - dataset (Optional[Dataset]): - The `Dataset` instance you want to update specs on. If no - `table_spec_name` is supplied, this will be used together with - `table_spec_index` to infer the name of table to update specs - on. This must be supplied if `table_spec_name`, `dataset_name` - or `dataset_display_name` are not supplied. + table_spec_name (Optional[str]): + The AutoML-assigned name for the table whose specs you want to + update. If not supplied, the client can determine this name + from a source `Dataset` object. + table_spec_index (Optional[int]): + If no `table_spec_name` was provided, we use this index to + determine which table to update column specs on. + column_spec_name (Optional[str]): + The name AutoML-assigned name for the column you want to + update. + column_spec_display_name (Optional[str]): + The human-readable name of the column you want to update. If + this is supplied in place of `column_spec_name`, you also need + to provide either a way to lookup the source dataset (using one + of the `dataset*` kwargs), or the `table_spec_name` of the + table this column belongs to. + type_code (Optional[str]): + The desired 'type_code' of the column. 
For more information + on the available types, please see the documentation: + https://cloud.google.com/automl-tables/docs/reference/rpc/google.cloud.automl.v1beta1#typecode + nullable (Optional[bool]): + Set to `True` or `False` to specify if this column's value + must expected to be present in all rows or not. + project (Optional[str]): The ID of the project that owns the + columns. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): + If you have initialized the client with a value for `region` it + will be used if this parameter is not supplied. Returns: A :class:`~google.cloud.automl_v1beta1.types.ColumnSpec` instance. @@ -1270,24 +1290,24 @@ def set_target_column( ... Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + table. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. - column_spec_name (Optional[string]): + column_spec_name (Optional[str]): The name AutoML-assigned name for the column you want to set as the target column. - column_spec_display_name (Optional[string]): + column_spec_display_name (Optional[str]): The human-readable name of the column you want to set as the target column. 
If this is supplied in place of `column_spec_name`, you also need to provide either a way to lookup the source dataset (using one of the `dataset*` kwargs), or the `table_spec_name` of the table this column belongs to. - table_spec_name (Optional[string]): + table_spec_name (Optional[str]): The AutoML-assigned name for the table whose target column you want to set . If not supplied, the client can determine this name from a source `Dataset` object. @@ -1295,14 +1315,14 @@ def set_target_column( If no `table_spec_name` or `column_spec_name` was provided, we use this index to determine which table to set the target column on. - dataset_display_name (Optional[string]): + dataset_display_name (Optional[str]): The human-readable name given to the dataset you want to update the target column of. If no `table_spec_name` is supplied, this will be used together with `table_spec_index` to infer the name of table to update the target column of. This must be supplied if `table_spec_name`, `dataset` or `dataset_name` are not supplied. - dataset_name (Optional[string]): + dataset_name (Optional[str]): The AutoML-assigned name given to the dataset you want to update the target column of. If no `table_spec_name` is supplied, this will be used together with `table_spec_index` to @@ -1383,28 +1403,28 @@ def set_time_column( ... project='my-project', region='us-central1') ... >>> client.set_time_column(dataset_display_name='my_dataset', - ... column_spec_name='Unix Time') + ... column_spec_display_name='Unix Time') ... Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + table. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. 
+ Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. - column_spec_name (Optional[string]): + column_spec_name (Optional[str]): The name AutoML-assigned name for the column you want to set as the time column. - column_spec_display_name (Optional[string]): + column_spec_display_name (Optional[str]): The human-readable name of the column you want to set as the time column. If this is supplied in place of `column_spec_name`, you also need to provide either a way to lookup the source dataset (using one of the `dataset*` kwargs), or the `table_spec_name` of the table this column belongs to. - table_spec_name (Optional[string]): + table_spec_name (Optional[str]): The AutoML-assigned name for the table whose time column you want to set . If not supplied, the client can determine this name from a source `Dataset` object. @@ -1412,14 +1432,14 @@ def set_time_column( If no `table_spec_name` or `column_spec_name` was provided, we use this index to determine which table to set the time column on. - dataset_display_name (Optional[string]): + dataset_display_name (Optional[str]): The human-readable name given to the dataset you want to update the time column of. If no `table_spec_name` is supplied, this will be used together with `table_spec_index` to infer the name of table to update the time column of. This must be supplied if `table_spec_name`, `dataset` or `dataset_name` are not supplied. - dataset_name (Optional[string]): + dataset_name (Optional[str]): The AutoML-assigned name given to the dataset you want to update the time column of. If no `table_spec_name` is supplied, this will be used together with `table_spec_index` to @@ -1495,26 +1515,26 @@ def clear_time_column( ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json') ... 
project='my-project', region='us-central1') ... - >>> client.set_time_column(dataset_display_name='my_dataset') + >>> client.clear_time_column(dataset_display_name='my_dataset') >>> Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + table. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. - dataset_display_name (Optional[string]): + dataset_display_name (Optional[str]): The human-readable name given to the dataset you want to update the time column of. If no `table_spec_name` is supplied, this will be used together with `table_spec_index` to infer the name of table to update the time column of. This must be supplied if `table_spec_name`, `dataset` or `dataset_name` are not supplied. - dataset_name (Optional[string]): + dataset_name (Optional[str]): The AutoML-assigned name given to the dataset you want to update the time column of. If no `table_spec_name` is supplied, this will be used together with `table_spec_index` to @@ -1585,24 +1605,24 @@ def set_weight_column( ... Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + table. 
If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. - column_spec_name (Optional[string]): + column_spec_name (Optional[str]): The name AutoML-assigned name for the column you want to set as the weight column. - column_spec_display_name (Optional[string]): + column_spec_display_name (Optional[str]): The human-readable name of the column you want to set as the weight column. If this is supplied in place of `column_spec_name`, you also need to provide either a way to lookup the source dataset (using one of the `dataset*` kwargs), or the `table_spec_name` of the table this column belongs to. - table_spec_name (Optional[string]): + table_spec_name (Optional[str]): The AutoML-assigned name for the table whose weight column you want to set . If not supplied, the client can determine this name from a source `Dataset` object. @@ -1610,14 +1630,14 @@ def set_weight_column( If no `table_spec_name` or `column_spec_name` was provided, we use this index to determine which table to set the weight column on. - dataset_display_name (Optional[string]): + dataset_display_name (Optional[str]): The human-readable name given to the dataset you want to update the weight column of. If no `table_spec_name` is supplied, this will be used together with `table_spec_index` to infer the name of table to update the weight column of. This must be supplied if `table_spec_name`, `dataset` or `dataset_name` are not supplied. - dataset_name (Optional[string]): + dataset_name (Optional[str]): The AutoML-assigned name given to the dataset you want to update the weight column of. 
If no `table_spec_name` is supplied, this will be used together with `table_spec_index` to @@ -1697,22 +1717,22 @@ def clear_weight_column( >>> Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + table. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. - dataset_display_name (Optional[string]): + dataset_display_name (Optional[str]): The human-readable name given to the dataset you want to update the weight column of. If no `table_spec_name` is supplied, this will be used together with `table_spec_index` to infer the name of table to update the weight column of. This must be supplied if `table_spec_name`, `dataset` or `dataset_name` are not supplied. - dataset_name (Optional[string]): + dataset_name (Optional[str]): The AutoML-assigned name given to the dataset you want to update the weight column of. If no `table_spec_name` is supplied, this will be used together with `table_spec_index` to @@ -1782,24 +1802,24 @@ def set_test_train_column( ... Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + table. 
If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. - column_spec_name (Optional[string]): + column_spec_name (Optional[str]): The name AutoML-assigned name for the column you want to set as the test/train column. - column_spec_display_name (Optional[string]): + column_spec_display_name (Optional[str]): The human-readable name of the column you want to set as the test/train column. If this is supplied in place of `column_spec_name`, you also need to provide either a way to lookup the source dataset (using one of the `dataset*` kwargs), or the `table_spec_name` of the table this column belongs to. - table_spec_name (Optional[string]): + table_spec_name (Optional[str]): The AutoML-assigned name for the table whose test/train column you want to set . If not supplied, the client can determine this name from a source `Dataset` object. @@ -1807,14 +1827,14 @@ def set_test_train_column( If no `table_spec_name` or `column_spec_name` was provided, we use this index to determine which table to set the test/train column on. - dataset_display_name (Optional[string]): + dataset_display_name (Optional[str]): The human-readable name given to the dataset you want to update the test/train column of. If no `table_spec_name` is supplied, this will be used together with `table_spec_index` to infer the name of table to update the test/train column of. This must be supplied if `table_spec_name`, `dataset` or `dataset_name` are not supplied. - dataset_name (Optional[string]): + dataset_name (Optional[str]): The AutoML-assigned name given to the dataset you want to update the test/train column of. 
If no `table_spec_name` is supplied, this will be used together with `table_spec_index` to @@ -1895,22 +1915,22 @@ def clear_test_train_column( >>> Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + table. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. - dataset_display_name (Optional[string]): + dataset_display_name (Optional[str]): The human-readable name given to the dataset you want to update the test/train column of. If no `table_spec_name` is supplied, this will be used together with `table_spec_index` to infer the name of table to update the test/train column of. This must be supplied if `table_spec_name`, `dataset` or `dataset_name` are not supplied. - dataset_name (Optional[string]): + dataset_name (Optional[str]): The AutoML-assigned name given to the dataset you want to update the test/train column of. If no `table_spec_name` is supplied, this will be used together with `table_spec_index` to @@ -1970,12 +1990,12 @@ def list_models(self, project=None, region=None, **kwargs): ... Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + models. 
If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. @@ -2024,19 +2044,19 @@ def list_model_evaluations( ... Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + model. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. - model_display_name (Optional[string]): + model_display_name (Optional[str]): The human-readable name given to the model you want to list evaluations for. This must be supplied if `model` or `model_name` are not supplied. - model_name (Optional[string]): + model_name (Optional[str]): The AutoML-assigned name given to the model you want to list evaluations for. This must be supplied if `model_display_name` or `model` are not supplied. @@ -2052,6 +2072,12 @@ def list_model_evaluations( instances. You can also iterate over the pages of the response using its `pages` property. + For a regression model, there will only be one evaluation. For a + classification model there will be one for each classification + label, as well as one for micro-averaged metrics. 
See more + documentation here: + https://cloud.google.com/automl-tables/docs/evaluate#automl-tables-list-model-evaluations-cli-curl + Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. @@ -2096,33 +2122,37 @@ def create_model( ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json') ... project='my-project', region='us-central1') ... - >>> m = client.create_model('my_model', dataset_display_name='my_dataset') + >>> m = client.create_model( + ... 'my_model', + ... dataset_display_name='my_dataset', + ... train_budget_milli_node_hours=1000 + ... ) >>> >>> m.result() # blocks on result >>> Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that will own the + model. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. - model_display_name (string): + model_display_name (str): A human-readable name to refer to this model by. train_budget_milli_node_hours (int): The amount of time (in thousandths of an hour) to spend training. This value must be between 1,000 and 72,000 inclusive (between 1 and 72 hours). - optimization_objective (string): + optimization_objective (str): The metric AutoML tables should optimize for. - dataset_display_name (Optional[string]): + dataset_display_name (Optional[str]): The human-readable name given to the dataset you want to train your model on. 
This must be supplied if `dataset` or `dataset_name` are not supplied. - dataset_name (Optional[string]): + dataset_name (Optional[str]): The AutoML-assigned name given to the dataset you want to train your model on. This must be supplied if `dataset_display_name` or `dataset` are not supplied. @@ -2132,15 +2162,17 @@ def create_model( are not supplied. model_metadata (Optional[Dict]): Optional model metadata to supply to the client. - include_column_spec_names(Optional[string]): + include_column_spec_names(Optional[str]): The list of the names of the columns you want to include to train your model on. - exclude_column_spec_names(Optional[string]): + exclude_column_spec_names(Optional[str]): The list of the names of the columns you want to exclude and not train your model on. Returns: - A :class:`~google.cloud.automl_v1beta1.types._OperationFuture` - instance. + google.api_core.operation.Operation: + An operation future that can be used to check for + completion synchronously or asynchronously. + Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. @@ -2245,19 +2277,19 @@ def delete_model( >>> Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + model. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. 
- model_display_name (Optional[string]): + model_display_name (Optional[str]): The human-readable name given to the model you want to delete. This must be supplied if `model` or `model_name` are not supplied. - model_name (Optional[string]): + model_name (Optional[str]): The AutoML-assigned name given to the model you want to delete. This must be supplied if `model_display_name` or `model` are not supplied. @@ -2267,8 +2299,9 @@ def delete_model( supplied. Returns: - A :class:`~google.cloud.automl_v1beta1.types._OperationFuture` - instance. + google.api_core.operation.Operation: + An operation future that can be used to check for + completion synchronously or asynchronously. Raises: google.api_core.exceptions.GoogleAPICallError: If the request @@ -2312,15 +2345,15 @@ def get_model_evaluation( >>> Args: - model_evaluation_name (string): + model_evaluation_name (str): This is the fully-qualified name generated by the AutoML API for this model evaluation. - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + model. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. @@ -2359,21 +2392,21 @@ def get_model( >>> Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. 
- region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + model. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. - model_name (Optional[string]): + model_name (Optional[str]): This is the fully-qualified name generated by the AutoML API for this model. This is not to be confused with the human-assigned `model_display_name` that is provided when creating a model. Either `model_name` or `model_display_name` must be provided. - model_display_name (Optional[string]): + model_display_name (Optional[str]): This is the name you provided for the model when first creating it. Either `model_name` or `model_display_name` must be provided. @@ -2428,19 +2461,19 @@ def deploy_model( >>> Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + model. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. - model_display_name (Optional[string]): + model_display_name (Optional[str]): The human-readable name given to the model you want to deploy. This must be supplied if `model` or `model_name` are not supplied. 
- model_name (Optional[string]): + model_name (Optional[str]): The AutoML-assigned name given to the model you want to deploy. This must be supplied if `model_display_name` or `model` are not supplied. @@ -2450,8 +2483,9 @@ def deploy_model( supplied. Returns: - A :class:`~google.cloud.automl_v1beta1.types._OperationFuture` - instance. + google.api_core.operation.Operation: + An operation future that can be used to check for + completion synchronously or asynchronously. Raises: google.api_core.exceptions.GoogleAPICallError: If the request @@ -2499,19 +2533,19 @@ def undeploy_model( >>> Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + model. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. - model_display_name (Optional[string]): + model_display_name (Optional[str]): The human-readable name given to the model you want to undeploy. This must be supplied if `model` or `model_name` are not supplied. - model_name (Optional[string]): + model_name (Optional[str]): The AutoML-assigned name given to the model you want to undeploy. This must be supplied if `model_display_name` or `model` are not supplied. @@ -2521,8 +2555,9 @@ def undeploy_model( supplied. Returns: - A :class:`~google.cloud.automl_v1beta1.types._OperationFuture` - instance. + google.api_core.operation.Operation: + An operation future that can be used to check for + completion synchronously or asynchronously. 
Raises: google.api_core.exceptions.GoogleAPICallError: If the request @@ -2574,22 +2609,22 @@ def predict( >>> Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + model. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. - inputs (Union[List[string], Dict[string, string]]): + inputs (Union[List[str], Dict[str, str]]): Either the sorted list of column values to predict with, or a key-value map of column display name to value to predict with. - model_display_name (Optional[string]): + model_display_name (Optional[str]): The human-readable name given to the model you want to predict with. This must be supplied if `model` or `model_name` are not supplied. - model_name (Optional[string]): + model_name (Optional[str]): The AutoML-assigned name given to the model you want to predict with. This must be supplied if `model_display_name` or `model` are not supplied. @@ -2676,12 +2711,12 @@ def batch_predict( ... Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + model. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. 
+ Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. credentials (Optional[google.auth.credentials.Credentials]): The @@ -2695,24 +2730,24 @@ def batch_predict( staged to GCS in `gs://{project}-automl-tables-staging/{uploaded_csv_name}` This must be supplied if neither `gcs_input_uris` nor `bigquery_input_uri` is supplied. - gcs_input_uris (Optional(Union[List[string], string])) + gcs_input_uris (Optional(Union[List[str], str])) Either a list of or a single GCS URI containing the data you want to predict off of. This must be supplied if neither `pandas_dataframe` nor `bigquery_input_uri` is supplied. - gcs_output_uri_prefix (Optional[string]) + gcs_output_uri_prefix (Optional[str]) The folder in GCS you want to write output to. This must be supplied if `bigquery_output_uri` is not. - bigquery_input_uri (Optional[string]) + bigquery_input_uri (Optional[str]) The BigQuery table to input data from. This must be supplied if neither `pandas_dataframe` nor `gcs_input_uris` is supplied. - bigquery_output_uri (Optional[string]) + bigquery_output_uri (Optional[str]) The BigQuery table to output data to. This must be supplied if `gcs_output_uri_prefix` is not. - model_display_name (Optional[string]): + model_display_name (Optional[str]): The human-readable name given to the model you want to predict with. This must be supplied if `model` or `model_name` are not supplied. - model_name (Optional[string]): + model_name (Optional[str]): The AutoML-assigned name given to the model you want to predict with. This must be supplied if `model_display_name` or `model` are not supplied. @@ -2722,8 +2757,9 @@ def batch_predict( supplied. Returns: - A :class:`~google.cloud.automl_v1beta1.types._OperationFuture` - instance. 
+ google.api_core.operation.Operation: + An operation future that can be used to check for + completion synchronously or asynchronously. Raises: google.api_core.exceptions.GoogleAPICallError: If the request @@ -2744,7 +2780,10 @@ def batch_predict( input_request = None if pandas_dataframe is not None: - self.__ensure_gcs_client_is_initialized(credentials) + project = project or self.project + region = region or self.region + credentials = credentials or self.credentials + self.__ensure_gcs_client_is_initialized(credentials, project) self.gcs_client.ensure_bucket_exists(project, region) gcs_input_uri = self.gcs_client.upload_pandas_dataframe(pandas_dataframe) input_request = {"gcs_source": {"input_uris": [gcs_input_uri]}} diff --git a/automl/noxfile.py b/automl/noxfile.py index 19fb3148ffd8..342fcdf41e3e 100644 --- a/automl/noxfile.py +++ b/automl/noxfile.py @@ -69,7 +69,6 @@ def default(session): session.install("mock", "pytest", "pytest-cov") for local_dep in LOCAL_DEPS: session.install("-e", local_dep) - session.install("-e", ".") session.install("-e", ".[pandas,storage]") # Run py.test against the unit tests. @@ -117,7 +116,6 @@ def system(session): for local_dep in LOCAL_DEPS: session.install("-e", local_dep) session.install("-e", "../test_utils/") - session.install("-e", ".") session.install("-e", ".[pandas,storage]") # Run py.test against the system tests. 
@@ -144,7 +142,7 @@ def cover(session): def docs(session): """Build the docs for this library.""" - session.install("-e", ".") + session.install("-e", ".[pandas,storage]") session.install("sphinx", "alabaster", "recommonmark") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) diff --git a/automl/setup.py b/automl/setup.py index c76f945594e8..a810e86ef966 100644 --- a/automl/setup.py +++ b/automl/setup.py @@ -19,7 +19,7 @@ name = "google-cloud-automl" description = "Cloud AutoML API client library" -version = "0.5.0" +version = "0.7.1" release_status = "Development Status :: 3 - Alpha" dependencies = [ "google-api-core[grpc] >= 1.14.0, < 2.0.0dev", diff --git a/automl/synth.metadata b/automl/synth.metadata index 7cf7214eecc8..641ff4cd295e 100644 --- a/automl/synth.metadata +++ b/automl/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-08-26T23:45:30.964132Z", + "updateTime": "2019-10-08T12:12:09.104671Z", "sources": [ { "generator": { "name": "artman", - "version": "0.35.0", - "dockerImage": "googleapis/artman@sha256:97ef134b6b1cc2c21868960d3b4352524023fb25b61fc137ca0783ce3c08c2cd" + "version": "0.38.0", + "dockerImage": "googleapis/artman@sha256:0d2f8d429110aeb8d82df6550ef4ede59d40df9062d260a1580fce688b0512bf" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "650caad718bb063f189405c23972dc9818886358", - "internalRef": "265565344" + "sha": "122bdbf877ad87439f8dd9d1474a8e5dde188087", + "internalRef": "273381131" } }, { @@ -34,6 +34,16 @@ "generator": "gapic", "config": "google/cloud/automl/artman_automl_v1beta1.yaml" } + }, + { + "client": { + "source": "googleapis", + "apiName": "automl", + "apiVersion": "v1", + "language": "python", + "generator": "gapic", + "config": "google/cloud/automl/artman_automl_v1.yaml" + } } ] } \ No newline at end of file diff --git a/automl/synth.py b/automl/synth.py index 06407e9fa019..937bb0abfa5d 100644 --- a/automl/synth.py +++ b/automl/synth.py @@ -21,7 
+21,7 @@ gapic = gcp.GAPICGenerator() common = gcp.CommonTemplates() -versions = ["v1beta1"] +versions = ["v1beta1", "v1"] # ---------------------------------------------------------------------------- @@ -38,15 +38,18 @@ # Use the highest version library to generate import alias. s.move(library / "google/cloud/automl.py") -# Add tables client to v1beta1 +# Add TablesClient and GcsClient to v1beta1 s.replace( f"google/cloud/automl_v1beta1/__init__.py", f"from google.cloud.automl_v1beta1.gapic import prediction_service_client", f"from google.cloud.automl_v1beta1.gapic import prediction_service_client\n" - f"from google.cloud.automl_v1beta1.tables import tables_client" + f"from google.cloud.automl_v1beta1.tables import tables_client\n" + f"from google.cloud.automl_v1beta1.tables import gcs_client" f"\n\n" f"class TablesClient(tables_client.TablesClient):" - f" __doc__ = tables_client.TablesClient.__doc__", + f" __doc__ = tables_client.TablesClient.__doc__" + f"\n\nclass GcsClient(gcs_client.GcsClient):" + f" __doc__ = gcs_client.GcsClient.__doc__" ) s.replace( @@ -57,7 +60,7 @@ 'AutoMlClient', 'PredictionServiceClient', \)""", - f'__all__ = ("enums", "types", "AutoMlClient", "PredictionServiceClient", "TablesClient")', + f'__all__ = ("enums", "types", "AutoMlClient", "PredictionServiceClient", "TablesClient", "GcsClient")', ) # Fixup issues in generated code @@ -110,6 +113,14 @@ # Add templated files # ---------------------------------------------------------------------------- templated_files = common.py_library(unit_cov_level=82, cov_level=83) + s.move(templated_files) +# install with extras (pandas, storage) +s.replace( + "noxfile.py", + """session\.install\(['"]-e['"], ['"]\.['"]\)""", + """session.install("-e", ".[pandas,storage]")""" +) + s.shell.run(["nox", "-s", "blacken"], hide_output=False) diff --git a/automl/tests/system/gapic/v1beta1/test_system_tables_client_v1.py b/automl/tests/system/gapic/v1beta1/test_system_tables_client_v1.py index 
01c49541dc70..27f2e884fb79 100644 --- a/automl/tests/system/gapic/v1beta1/test_system_tables_client_v1.py +++ b/automl/tests/system/gapic/v1beta1/test_system_tables_client_v1.py @@ -26,13 +26,14 @@ from google.api_core import exceptions from google.cloud.automl_v1beta1.gapic import enums +from test_utils.vpcsc_config import vpcsc_config + PROJECT = os.environ["PROJECT_ID"] REGION = "us-central1" MAX_WAIT_TIME_SECONDS = 30 MAX_SLEEP_TIME_SECONDS = 5 STATIC_DATASET = "test_dataset_do_not_delete" STATIC_MODEL = "test_model_do_not_delete" -RUNNING_IN_VPCSC = os.getenv("GOOGLE_CLOUD_TESTS_IN_VPCSC", "").lower() == "true" ID = "{rand}_{time}".format( rand="".join( @@ -58,7 +59,7 @@ def cancel_and_wait(self, op): sleep_time = min(sleep_time * 2, MAX_SLEEP_TIME_SECONDS) assert op.cancelled() - @unittest.skipIf(RUNNING_IN_VPCSC, "Test is not VPCSC compatible.") + @vpcsc_config.skip_if_inside_vpcsc def test_list_datasets(self): client = automl_v1beta1.TablesClient(project=PROJECT, region=REGION) dataset = self.ensure_dataset_ready(client) @@ -69,7 +70,7 @@ def test_list_datasets(self): ) ) - @unittest.skipIf(RUNNING_IN_VPCSC, "Test is not VPCSC compatible.") + @vpcsc_config.skip_if_inside_vpcsc def test_list_models(self): client = automl_v1beta1.TablesClient(project=PROJECT, region=REGION) model = self.ensure_model_ready(client) @@ -86,7 +87,7 @@ def test_create_delete_dataset(self): ) client.delete_dataset(dataset=dataset) - @unittest.skipIf(RUNNING_IN_VPCSC, "Test is not VPCSC compatible.") + @vpcsc_config.skip_if_inside_vpcsc def test_import_data(self): client = automl_v1beta1.TablesClient(project=PROJECT, region=REGION) display_name = _id("t_import") @@ -98,7 +99,7 @@ def test_import_data(self): self.cancel_and_wait(op) client.delete_dataset(dataset=dataset) - @unittest.skipIf(RUNNING_IN_VPCSC, "Test is not VPCSC compatible.") + @vpcsc_config.skip_if_inside_vpcsc def test_import_pandas_dataframe(self): client = automl_v1beta1.TablesClient(project=PROJECT, region=REGION) 
display_name = _id("t_import_pandas") @@ -127,7 +128,7 @@ def ensure_dataset_ready(self, client): return dataset - @unittest.skipIf(RUNNING_IN_VPCSC, "Test is not VPCSC compatible.") + @vpcsc_config.skip_if_inside_vpcsc def test_list_column_specs(self): client = automl_v1beta1.TablesClient(project=PROJECT, region=REGION) dataset = self.ensure_dataset_ready(client) @@ -142,21 +143,21 @@ def test_list_column_specs(self): ) ) - @unittest.skipIf(RUNNING_IN_VPCSC, "Test is not VPCSC compatible.") + @vpcsc_config.skip_if_inside_vpcsc def test_get_column_spec(self): client = automl_v1beta1.TablesClient(project=PROJECT, region=REGION) dataset = self.ensure_dataset_ready(client) name = [d for d in client.list_column_specs(dataset=dataset)][0].name assert client.get_column_spec(name).name == name - @unittest.skipIf(RUNNING_IN_VPCSC, "Test is not VPCSC compatible.") + @vpcsc_config.skip_if_inside_vpcsc def test_list_table_specs(self): client = automl_v1beta1.TablesClient(project=PROJECT, region=REGION) dataset = self.ensure_dataset_ready(client) name = [d for d in client.list_table_specs(dataset=dataset)][0].name assert client.get_table_spec(name).name == name - @unittest.skipIf(RUNNING_IN_VPCSC, "Test is not VPCSC compatible.") + @vpcsc_config.skip_if_inside_vpcsc def test_set_column_nullable(self): client = automl_v1beta1.TablesClient(project=PROJECT, region=REGION) dataset = self.ensure_dataset_ready(client) @@ -166,7 +167,7 @@ def test_set_column_nullable(self): columns = {c.display_name: c for c in client.list_column_specs(dataset=dataset)} assert columns["POutcome"].data_type.nullable == True - @unittest.skipIf(RUNNING_IN_VPCSC, "Test is not VPCSC compatible.") + @vpcsc_config.skip_if_inside_vpcsc def test_set_target_column(self): client = automl_v1beta1.TablesClient(project=PROJECT, region=REGION) dataset = self.ensure_dataset_ready(client) @@ -178,7 +179,7 @@ def test_set_target_column(self): "/{}".format(metadata.target_column_spec_id) ) - 
@unittest.skipIf(RUNNING_IN_VPCSC, "Test is not VPCSC compatible.") + @vpcsc_config.skip_if_inside_vpcsc def test_set_weight_column(self): client = automl_v1beta1.TablesClient(project=PROJECT, region=REGION) dataset = self.ensure_dataset_ready(client) @@ -190,7 +191,7 @@ def test_set_weight_column(self): "/{}".format(metadata.weight_column_spec_id) ) - @unittest.skipIf(RUNNING_IN_VPCSC, "Test is not VPCSC compatible.") + @vpcsc_config.skip_if_inside_vpcsc def test_set_weight_and_target_column(self): client = automl_v1beta1.TablesClient(project=PROJECT, region=REGION) dataset = self.ensure_dataset_ready(client) @@ -206,7 +207,7 @@ def test_set_weight_and_target_column(self): "/{}".format(metadata.target_column_spec_id) ) - @unittest.skipIf(RUNNING_IN_VPCSC, "Test is not VPCSC compatible.") + @vpcsc_config.skip_if_inside_vpcsc def test_create_delete_model(self): client = automl_v1beta1.TablesClient(project=PROJECT, region=REGION) dataset = self.ensure_dataset_ready(client) @@ -218,7 +219,7 @@ def test_create_delete_model(self): self.cancel_and_wait(op) client.delete_model(model_display_name=display_name) - @unittest.skipIf(RUNNING_IN_VPCSC, "Test is not VPCSC compatible.") + @vpcsc_config.skip_if_inside_vpcsc def test_list_model_evaluations(self): client = automl_v1beta1.TablesClient(project=PROJECT, region=REGION) model = self.ensure_model_online(client) @@ -233,14 +234,14 @@ def test_list_model_evaluations(self): ) ) - @unittest.skipIf(RUNNING_IN_VPCSC, "Test is not VPCSC compatible.") + @vpcsc_config.skip_if_inside_vpcsc def test_get_model_evaluation(self): client = automl_v1beta1.TablesClient(project=PROJECT, region=REGION) model = self.ensure_model_online(client) name = [m for m in client.list_model_evaluations(model=model)][0].name assert client.get_model_evaluation(model_evaluation_name=name).name == name - @unittest.skipIf(RUNNING_IN_VPCSC, "Test is not VPCSC compatible.") + @vpcsc_config.skip_if_inside_vpcsc def test_online_predict(self): client = 
automl_v1beta1.TablesClient(project=PROJECT, region=REGION) model = self.ensure_model_online(client) diff --git a/automl/tests/unit/gapic/v1/test_auto_ml_client_v1.py b/automl/tests/unit/gapic/v1/test_auto_ml_client_v1.py new file mode 100644 index 000000000000..cdf4555f1969 --- /dev/null +++ b/automl/tests/unit/gapic/v1/test_auto_ml_client_v1.py @@ -0,0 +1,780 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Unit tests.""" + +import mock +import pytest + +from google.rpc import status_pb2 + +from google.cloud import automl_v1 +from google.cloud.automl_v1.proto import dataset_pb2 +from google.cloud.automl_v1.proto import io_pb2 +from google.cloud.automl_v1.proto import model_evaluation_pb2 +from google.cloud.automl_v1.proto import model_pb2 +from google.cloud.automl_v1.proto import service_pb2 +from google.longrunning import operations_pb2 +from google.protobuf import empty_pb2 +from google.protobuf import field_mask_pb2 + + +class MultiCallableStub(object): + """Stub for the grpc.UnaryUnaryMultiCallable interface.""" + + def __init__(self, method, channel_stub): + self.method = method + self.channel_stub = channel_stub + + def __call__(self, request, timeout=None, metadata=None, credentials=None): + self.channel_stub.requests.append((self.method, request)) + + response = None + if self.channel_stub.responses: + response = self.channel_stub.responses.pop() + + if isinstance(response, Exception): + raise 
response + + if response: + return response + + +class ChannelStub(object): + """Stub for the grpc.Channel interface.""" + + def __init__(self, responses=[]): + self.responses = responses + self.requests = [] + + def unary_unary(self, method, request_serializer=None, response_deserializer=None): + return MultiCallableStub(method, self) + + +class CustomException(Exception): + pass + + +class TestAutoMlClient(object): + def test_create_dataset(self): + # Setup Expected Response + name = "name3373707" + display_name = "displayName1615086568" + description = "description-1724546052" + example_count = 1517063674 + etag = "etag3123477" + expected_response = { + "name": name, + "display_name": display_name, + "description": description, + "example_count": example_count, + "etag": etag, + } + expected_response = dataset_pb2.Dataset(**expected_response) + operation = operations_pb2.Operation( + name="operations/test_create_dataset", done=True + ) + operation.response.Pack(expected_response) + + # Mock the API response + channel = ChannelStub(responses=[operation]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup Request + parent = client.location_path("[PROJECT]", "[LOCATION]") + dataset = {} + + response = client.create_dataset(parent, dataset) + result = response.result() + assert expected_response == result + + assert len(channel.requests) == 1 + expected_request = service_pb2.CreateDatasetRequest( + parent=parent, dataset=dataset + ) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_create_dataset_exception(self): + # Setup Response + error = status_pb2.Status() + operation = operations_pb2.Operation( + name="operations/test_create_dataset_exception", done=True + ) + operation.error.CopyFrom(error) + + # Mock the API response + channel = ChannelStub(responses=[operation]) + patch = 
mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup Request + parent = client.location_path("[PROJECT]", "[LOCATION]") + dataset = {} + + response = client.create_dataset(parent, dataset) + exception = response.exception() + assert exception.errors[0] == error + + def test_update_dataset(self): + # Setup Expected Response + name = "name3373707" + display_name = "displayName1615086568" + description = "description-1724546052" + example_count = 1517063674 + etag = "etag3123477" + expected_response = { + "name": name, + "display_name": display_name, + "description": description, + "example_count": example_count, + "etag": etag, + } + expected_response = dataset_pb2.Dataset(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup Request + dataset = {} + update_mask = {} + + response = client.update_dataset(dataset, update_mask) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = service_pb2.UpdateDatasetRequest( + dataset=dataset, update_mask=update_mask + ) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_update_dataset_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup request + dataset = {} + update_mask = {} + + with pytest.raises(CustomException): + client.update_dataset(dataset, update_mask) + + def test_get_dataset(self): + # Setup Expected Response + name_2 = "name2-1052831874" + 
display_name = "displayName1615086568" + description = "description-1724546052" + example_count = 1517063674 + etag = "etag3123477" + expected_response = { + "name": name_2, + "display_name": display_name, + "description": description, + "example_count": example_count, + "etag": etag, + } + expected_response = dataset_pb2.Dataset(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup Request + name = client.dataset_path("[PROJECT]", "[LOCATION]", "[DATASET]") + + response = client.get_dataset(name) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = service_pb2.GetDatasetRequest(name=name) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_get_dataset_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup request + name = client.dataset_path("[PROJECT]", "[LOCATION]", "[DATASET]") + + with pytest.raises(CustomException): + client.get_dataset(name) + + def test_list_datasets(self): + # Setup Expected Response + next_page_token = "" + datasets_element = {} + datasets = [datasets_element] + expected_response = {"next_page_token": next_page_token, "datasets": datasets} + expected_response = service_pb2.ListDatasetsResponse(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup Request 
+ parent = client.location_path("[PROJECT]", "[LOCATION]") + + paged_list_response = client.list_datasets(parent) + resources = list(paged_list_response) + assert len(resources) == 1 + + assert expected_response.datasets[0] == resources[0] + + assert len(channel.requests) == 1 + expected_request = service_pb2.ListDatasetsRequest(parent=parent) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_list_datasets_exception(self): + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup request + parent = client.location_path("[PROJECT]", "[LOCATION]") + + paged_list_response = client.list_datasets(parent) + with pytest.raises(CustomException): + list(paged_list_response) + + def test_delete_dataset(self): + # Setup Expected Response + expected_response = {} + expected_response = empty_pb2.Empty(**expected_response) + operation = operations_pb2.Operation( + name="operations/test_delete_dataset", done=True + ) + operation.response.Pack(expected_response) + + # Mock the API response + channel = ChannelStub(responses=[operation]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup Request + name = client.dataset_path("[PROJECT]", "[LOCATION]", "[DATASET]") + + response = client.delete_dataset(name) + result = response.result() + assert expected_response == result + + assert len(channel.requests) == 1 + expected_request = service_pb2.DeleteDatasetRequest(name=name) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_delete_dataset_exception(self): + # Setup Response + error = status_pb2.Status() + operation = operations_pb2.Operation( + 
name="operations/test_delete_dataset_exception", done=True + ) + operation.error.CopyFrom(error) + + # Mock the API response + channel = ChannelStub(responses=[operation]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup Request + name = client.dataset_path("[PROJECT]", "[LOCATION]", "[DATASET]") + + response = client.delete_dataset(name) + exception = response.exception() + assert exception.errors[0] == error + + def test_import_data(self): + # Setup Expected Response + expected_response = {} + expected_response = empty_pb2.Empty(**expected_response) + operation = operations_pb2.Operation( + name="operations/test_import_data", done=True + ) + operation.response.Pack(expected_response) + + # Mock the API response + channel = ChannelStub(responses=[operation]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup Request + name = client.dataset_path("[PROJECT]", "[LOCATION]", "[DATASET]") + input_config = {} + + response = client.import_data(name, input_config) + result = response.result() + assert expected_response == result + + assert len(channel.requests) == 1 + expected_request = service_pb2.ImportDataRequest( + name=name, input_config=input_config + ) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_import_data_exception(self): + # Setup Response + error = status_pb2.Status() + operation = operations_pb2.Operation( + name="operations/test_import_data_exception", done=True + ) + operation.error.CopyFrom(error) + + # Mock the API response + channel = ChannelStub(responses=[operation]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = 
automl_v1.AutoMlClient() + + # Setup Request + name = client.dataset_path("[PROJECT]", "[LOCATION]", "[DATASET]") + input_config = {} + + response = client.import_data(name, input_config) + exception = response.exception() + assert exception.errors[0] == error + + def test_export_data(self): + # Setup Expected Response + expected_response = {} + expected_response = empty_pb2.Empty(**expected_response) + operation = operations_pb2.Operation( + name="operations/test_export_data", done=True + ) + operation.response.Pack(expected_response) + + # Mock the API response + channel = ChannelStub(responses=[operation]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup Request + name = client.dataset_path("[PROJECT]", "[LOCATION]", "[DATASET]") + output_config = {} + + response = client.export_data(name, output_config) + result = response.result() + assert expected_response == result + + assert len(channel.requests) == 1 + expected_request = service_pb2.ExportDataRequest( + name=name, output_config=output_config + ) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_export_data_exception(self): + # Setup Response + error = status_pb2.Status() + operation = operations_pb2.Operation( + name="operations/test_export_data_exception", done=True + ) + operation.error.CopyFrom(error) + + # Mock the API response + channel = ChannelStub(responses=[operation]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup Request + name = client.dataset_path("[PROJECT]", "[LOCATION]", "[DATASET]") + output_config = {} + + response = client.export_data(name, output_config) + exception = response.exception() + assert exception.errors[0] == error + + def test_create_model(self): + # 
Setup Expected Response + name = "name3373707" + display_name = "displayName1615086568" + dataset_id = "datasetId-2115646910" + expected_response = { + "name": name, + "display_name": display_name, + "dataset_id": dataset_id, + } + expected_response = model_pb2.Model(**expected_response) + operation = operations_pb2.Operation( + name="operations/test_create_model", done=True + ) + operation.response.Pack(expected_response) + + # Mock the API response + channel = ChannelStub(responses=[operation]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup Request + parent = client.location_path("[PROJECT]", "[LOCATION]") + model = {} + + response = client.create_model(parent, model) + result = response.result() + assert expected_response == result + + assert len(channel.requests) == 1 + expected_request = service_pb2.CreateModelRequest(parent=parent, model=model) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_create_model_exception(self): + # Setup Response + error = status_pb2.Status() + operation = operations_pb2.Operation( + name="operations/test_create_model_exception", done=True + ) + operation.error.CopyFrom(error) + + # Mock the API response + channel = ChannelStub(responses=[operation]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup Request + parent = client.location_path("[PROJECT]", "[LOCATION]") + model = {} + + response = client.create_model(parent, model) + exception = response.exception() + assert exception.errors[0] == error + + def test_get_model(self): + # Setup Expected Response + name_2 = "name2-1052831874" + display_name = "displayName1615086568" + dataset_id = "datasetId-2115646910" + expected_response = { + "name": name_2, + 
"display_name": display_name, + "dataset_id": dataset_id, + } + expected_response = model_pb2.Model(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup Request + name = client.model_path("[PROJECT]", "[LOCATION]", "[MODEL]") + + response = client.get_model(name) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = service_pb2.GetModelRequest(name=name) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_get_model_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup request + name = client.model_path("[PROJECT]", "[LOCATION]", "[MODEL]") + + with pytest.raises(CustomException): + client.get_model(name) + + def test_update_model(self): + # Setup Expected Response + name = "name3373707" + display_name = "displayName1615086568" + dataset_id = "datasetId-2115646910" + expected_response = { + "name": name, + "display_name": display_name, + "dataset_id": dataset_id, + } + expected_response = model_pb2.Model(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup Request + model = {} + update_mask = {} + + response = client.update_model(model, update_mask) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = service_pb2.UpdateModelRequest( + 
model=model, update_mask=update_mask + ) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_update_model_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup request + model = {} + update_mask = {} + + with pytest.raises(CustomException): + client.update_model(model, update_mask) + + def test_list_models(self): + # Setup Expected Response + next_page_token = "" + model_element = {} + model = [model_element] + expected_response = {"next_page_token": next_page_token, "model": model} + expected_response = service_pb2.ListModelsResponse(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup Request + parent = client.location_path("[PROJECT]", "[LOCATION]") + + paged_list_response = client.list_models(parent) + resources = list(paged_list_response) + assert len(resources) == 1 + + assert expected_response.model[0] == resources[0] + + assert len(channel.requests) == 1 + expected_request = service_pb2.ListModelsRequest(parent=parent) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_list_models_exception(self): + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup request + parent = client.location_path("[PROJECT]", "[LOCATION]") + + paged_list_response = client.list_models(parent) + with pytest.raises(CustomException): + 
list(paged_list_response) + + def test_delete_model(self): + # Setup Expected Response + expected_response = {} + expected_response = empty_pb2.Empty(**expected_response) + operation = operations_pb2.Operation( + name="operations/test_delete_model", done=True + ) + operation.response.Pack(expected_response) + + # Mock the API response + channel = ChannelStub(responses=[operation]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup Request + name = client.model_path("[PROJECT]", "[LOCATION]", "[MODEL]") + + response = client.delete_model(name) + result = response.result() + assert expected_response == result + + assert len(channel.requests) == 1 + expected_request = service_pb2.DeleteModelRequest(name=name) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_delete_model_exception(self): + # Setup Response + error = status_pb2.Status() + operation = operations_pb2.Operation( + name="operations/test_delete_model_exception", done=True + ) + operation.error.CopyFrom(error) + + # Mock the API response + channel = ChannelStub(responses=[operation]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup Request + name = client.model_path("[PROJECT]", "[LOCATION]", "[MODEL]") + + response = client.delete_model(name) + exception = response.exception() + assert exception.errors[0] == error + + def test_get_model_evaluation(self): + # Setup Expected Response + name_2 = "name2-1052831874" + annotation_spec_id = "annotationSpecId60690191" + evaluated_example_count = 277565350 + expected_response = { + "name": name_2, + "annotation_spec_id": annotation_spec_id, + "evaluated_example_count": evaluated_example_count, + } + expected_response = 
model_evaluation_pb2.ModelEvaluation(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup Request + name = client.model_evaluation_path( + "[PROJECT]", "[LOCATION]", "[MODEL]", "[MODEL_EVALUATION]" + ) + + response = client.get_model_evaluation(name) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = service_pb2.GetModelEvaluationRequest(name=name) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_get_model_evaluation_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup request + name = client.model_evaluation_path( + "[PROJECT]", "[LOCATION]", "[MODEL]", "[MODEL_EVALUATION]" + ) + + with pytest.raises(CustomException): + client.get_model_evaluation(name) + + def test_list_model_evaluations(self): + # Setup Expected Response + next_page_token = "" + model_evaluation_element = {} + model_evaluation = [model_evaluation_element] + expected_response = { + "next_page_token": next_page_token, + "model_evaluation": model_evaluation, + } + expected_response = service_pb2.ListModelEvaluationsResponse( + **expected_response + ) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup Request + parent = client.model_path("[PROJECT]", "[LOCATION]", "[MODEL]") + filter_ = "filter-1274492040" + + 
paged_list_response = client.list_model_evaluations(parent, filter_) + resources = list(paged_list_response) + assert len(resources) == 1 + + assert expected_response.model_evaluation[0] == resources[0] + + assert len(channel.requests) == 1 + expected_request = service_pb2.ListModelEvaluationsRequest( + parent=parent, filter=filter_ + ) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_list_model_evaluations_exception(self): + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup request + parent = client.model_path("[PROJECT]", "[LOCATION]", "[MODEL]") + filter_ = "filter-1274492040" + + paged_list_response = client.list_model_evaluations(parent, filter_) + with pytest.raises(CustomException): + list(paged_list_response) diff --git a/automl/tests/unit/gapic/v1/test_prediction_service_client_v1.py b/automl/tests/unit/gapic/v1/test_prediction_service_client_v1.py new file mode 100644 index 000000000000..02d12f0ad4d3 --- /dev/null +++ b/automl/tests/unit/gapic/v1/test_prediction_service_client_v1.py @@ -0,0 +1,103 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Unit tests.""" + +import mock +import pytest + +from google.cloud import automl_v1 +from google.cloud.automl_v1.proto import data_items_pb2 +from google.cloud.automl_v1.proto import prediction_service_pb2 + + +class MultiCallableStub(object): + """Stub for the grpc.UnaryUnaryMultiCallable interface.""" + + def __init__(self, method, channel_stub): + self.method = method + self.channel_stub = channel_stub + + def __call__(self, request, timeout=None, metadata=None, credentials=None): + self.channel_stub.requests.append((self.method, request)) + + response = None + if self.channel_stub.responses: + response = self.channel_stub.responses.pop() + + if isinstance(response, Exception): + raise response + + if response: + return response + + +class ChannelStub(object): + """Stub for the grpc.Channel interface.""" + + def __init__(self, responses=[]): + self.responses = responses + self.requests = [] + + def unary_unary(self, method, request_serializer=None, response_deserializer=None): + return MultiCallableStub(method, self) + + +class CustomException(Exception): + pass + + +class TestPredictionServiceClient(object): + def test_predict(self): + # Setup Expected Response + expected_response = {} + expected_response = prediction_service_pb2.PredictResponse(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.PredictionServiceClient() + + # Setup Request + name = client.model_path("[PROJECT]", "[LOCATION]", "[MODEL]") + payload = {} + + response = client.predict(name, payload) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = prediction_service_pb2.PredictRequest( + name=name, payload=payload + ) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def 
test_predict_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.PredictionServiceClient() + + # Setup request + name = client.model_path("[PROJECT]", "[LOCATION]", "[MODEL]") + payload = {} + + with pytest.raises(CustomException): + client.predict(name, payload) diff --git a/automl/tests/unit/gapic/v1beta1/test_gcs_client_v1beta1.py b/automl/tests/unit/gapic/v1beta1/test_gcs_client_v1beta1.py index 49d4a0f85423..f7a2e27ab7d8 100644 --- a/automl/tests/unit/gapic/v1beta1/test_gcs_client_v1beta1.py +++ b/automl/tests/unit/gapic/v1beta1/test_gcs_client_v1beta1.py @@ -22,8 +22,11 @@ import re from google.api_core import exceptions +from google.auth.credentials import AnonymousCredentials from google.cloud import automl_v1beta1 +PROJECT = "project" + class TestGcsClient(object): def gcs_client(self, bucket_name=None, client_attrs={}): @@ -32,6 +35,24 @@ def gcs_client(self, bucket_name=None, client_attrs={}): bucket_name=bucket_name, client=client_mock ) + def test_init_with_project_and_credentials(self): + # helper for checking that the storage client is initialized with the + # passed in project and credentials. 
+ class FakeStorageClient: + def __init__(self, project=None, credentials=None): + self.project = project + self.credentials = credentials + + patch = mock.patch("google.cloud.storage.Client", new=FakeStorageClient) + with patch: + credentials = AnonymousCredentials() + gcs_client = automl_v1beta1.tables.gcs_client.GcsClient( + project=PROJECT, credentials=credentials + ) + assert isinstance(gcs_client.client, FakeStorageClient) + assert gcs_client.client.project == PROJECT + assert gcs_client.client.credentials == credentials + def test_ensure_bucket_exists(self): mock_bucket = mock.Mock() gcs_client = self.gcs_client( diff --git a/automl/tests/unit/gapic/v1beta1/test_tables_client_v1beta1.py b/automl/tests/unit/gapic/v1beta1/test_tables_client_v1beta1.py index aa1babfa8752..516a4b76080d 100644 --- a/automl/tests/unit/gapic/v1beta1/test_tables_client_v1beta1.py +++ b/automl/tests/unit/gapic/v1beta1/test_tables_client_v1beta1.py @@ -20,8 +20,9 @@ import pandas import pytest -from google.cloud import automl_v1beta1 from google.api_core import exceptions +from google.auth.credentials import AnonymousCredentials +from google.cloud import automl_v1beta1 from google.cloud.automl_v1beta1.proto import data_types_pb2 PROJECT = "project" @@ -214,6 +215,33 @@ def test_import_pandas_dataframe(self): "name", {"gcs_source": {"input_uris": ["uri"]}} ) + def test_import_pandas_dataframe_init_gcs(self): + client = automl_v1beta1.TablesClient( + client=mock.Mock(), + prediction_client=mock.Mock(), + project=PROJECT, + region=REGION, + credentials=AnonymousCredentials(), + ) + + dataframe = pandas.DataFrame({}) + patch = mock.patch( + "google.cloud.automl_v1beta1.tables.tables_client.gcs_client.GcsClient", + bucket_name="my_bucket", + ) + with patch as MockGcsClient: + mockInstance = MockGcsClient.return_value + mockInstance.upload_pandas_dataframe.return_value = "uri" + + client.import_data(dataset_name="name", pandas_dataframe=dataframe) + + assert client.gcs_client is 
mockInstance + client.gcs_client.ensure_bucket_exists.assert_called_with(PROJECT, REGION) + client.gcs_client.upload_pandas_dataframe.assert_called_with(dataframe) + client.auto_ml_client.import_data.assert_called_with( + "name", {"gcs_source": {"input_uris": ["uri"]}} + ) + def test_import_gcs_uri(self): client = self.tables_client({"import_data.return_value": None}, {}) client.import_data(dataset_name="name", gcs_input_uris="uri") @@ -1220,6 +1248,40 @@ def test_batch_predict_pandas_dataframe(self): {"gcs_destination": {"output_uri_prefix": "gs://output"}}, ) + def test_batch_predict_pandas_dataframe_init_gcs(self): + client = automl_v1beta1.TablesClient( + client=mock.Mock(), + prediction_client=mock.Mock(), + project=PROJECT, + region=REGION, + credentials=AnonymousCredentials(), + ) + + dataframe = pandas.DataFrame({}) + patch = mock.patch( + "google.cloud.automl_v1beta1.tables.tables_client.gcs_client.GcsClient", + bucket_name="my_bucket", + ) + with patch as MockGcsClient: + mockInstance = MockGcsClient.return_value + mockInstance.upload_pandas_dataframe.return_value = "gs://input" + + dataframe = pandas.DataFrame({}) + client.batch_predict( + model_name="my_model", + pandas_dataframe=dataframe, + gcs_output_uri_prefix="gs://output", + ) + + client.gcs_client.ensure_bucket_exists.assert_called_with(PROJECT, REGION) + client.gcs_client.upload_pandas_dataframe.assert_called_with(dataframe) + + client.prediction_client.batch_predict.assert_called_with( + "my_model", + {"gcs_source": {"input_uris": ["gs://input"]}}, + {"gcs_destination": {"output_uri_prefix": "gs://output"}}, + ) + def test_batch_predict_gcs(self): client = self.tables_client({}, {}) client.batch_predict( @@ -1317,3 +1379,25 @@ def test_batch_predict_no_model(self): ) client.auto_ml_client.list_models.assert_not_called() client.prediction_client.batch_predict.assert_not_called() + + def test_auto_ml_client_credentials(self): + credentials_mock = mock.Mock() + patch_auto_ml_client = mock.patch( + 
"google.cloud.automl_v1beta1.gapic.auto_ml_client.AutoMlClient" + ) + with patch_auto_ml_client as MockAutoMlClient: + client = automl_v1beta1.TablesClient(credentials=credentials_mock) + _, auto_ml_client_kwargs = MockAutoMlClient.call_args + assert "credentials" in auto_ml_client_kwargs + assert auto_ml_client_kwargs["credentials"] == credentials_mock + + def test_prediction_client_credentials(self): + credentials_mock = mock.Mock() + patch_prediction_client = mock.patch( + "google.cloud.automl_v1beta1.gapic.prediction_service_client.PredictionServiceClient" + ) + with patch_prediction_client as MockPredictionClient: + client = automl_v1beta1.TablesClient(credentials=credentials_mock) + _, prediction_client_kwargs = MockPredictionClient.call_args + assert "credentials" in prediction_client_kwargs + assert prediction_client_kwargs["credentials"] == credentials_mock diff --git a/bigquery/CHANGELOG.md b/bigquery/CHANGELOG.md index c938c05b7f13..1560e456a24e 100644 --- a/bigquery/CHANGELOG.md +++ b/bigquery/CHANGELOG.md @@ -4,6 +4,65 @@ [1]: https://pypi.org/project/google-cloud-bigquery/#history +## 1.21.0 + +10-16-2019 10:33 PDT + + +### New Features + +- add ability to pass in a table ID instead of a query to the `%%bigquery` magic ([#9170](https://github.com/googleapis/google-cloud-python/pull/9170)) +- add support for custom `QueryJobConfig` in `BigQuery.cursor.execute` method ([#9278](https://github.com/googleapis/google-cloud-python/pull/9278)) +- store `QueryJob` to destination var on error in `%%bigquery` magic ([#9245](https://github.com/googleapis/google-cloud-python/pull/9245)) +- add script statistics to job resource ([#9428](https://github.com/googleapis/google-cloud-python/pull/9428)) +- add support for sheets ranges ([#9416](https://github.com/googleapis/google-cloud-python/pull/9416)) +- add support for listing jobs by parent job ([#9225](https://github.com/googleapis/google-cloud-python/pull/9225)) +- expose customer managed encryption key for ML 
models ([#9302](https://github.com/googleapis/google-cloud-python/pull/9302)) +- add `Dataset.default_partition_expiration_ms` and `Table.require_partition_filter` properties ([#9464](https://github.com/googleapis/google-cloud-python/pull/9464)) + +### Dependencies + +- restrict version range of `google-resumable-media` ([#9243](https://github.com/googleapis/google-cloud-python/pull/9243)) + +### Documentation + +- document how to load data as JSON string ([#9231](https://github.com/googleapis/google-cloud-python/pull/9231)) +- standardize comments and formatting in existing code samples ([#9212](https://github.com/googleapis/google-cloud-python/pull/9212)) +- rewrite docstrings in Google style ([#9326](https://github.com/googleapis/google-cloud-python/pull/9326)) +- fix incorrect links to REST API in reference docs ([#9436](https://github.com/googleapis/google-cloud-python/pull/9436)) + +### Internal / Testing Changes + +- add code samples to lint check ([#9277](https://github.com/googleapis/google-cloud-python/pull/9277)) +- update code samples to use strings for table and dataset IDs ([#9136](https://github.com/googleapis/google-cloud-python/pull/9136)) +- simplify scripting system test to reduce flakiness ([#9458](https://github.com/googleapis/google-cloud-python/pull/9458)) + +## 1.20.0 + +09-13-2019 11:22 PDT + + +### Implementation Changes +- Change default endpoint to bigquery.googleapis.com ([#9213](https://github.com/googleapis/google-cloud-python/pull/9213)) +- Change the default value of Cursor instances' `arraysize` attribute to None ([#9199](https://github.com/googleapis/google-cloud-python/pull/9199)) +- Deprecate automatic schema conversion. ([#9176](https://github.com/googleapis/google-cloud-python/pull/9176)) +- Fix `list_rows()` max results with BQ storage client ([#9178](https://github.com/googleapis/google-cloud-python/pull/9178)) + +### New Features +- Add `Model.encryption_config`. 
(via synth) ([#9214](https://github.com/googleapis/google-cloud-python/pull/9214)) +- Add `Client.insert_rows_from_dataframe()` method ([#9162](https://github.com/googleapis/google-cloud-python/pull/9162)) +- Add support for array parameters to `Cursor.execute()`. ([#9189](https://github.com/googleapis/google-cloud-python/pull/9189)) +- Add support for project IDs with org prefix to `Table.from_string()` factory. ([#9161](https://github.com/googleapis/google-cloud-python/pull/9161)) +- Add `--max_results` option to Jupyter magics ([#9169](https://github.com/googleapis/google-cloud-python/pull/9169)) +- Autofetch table schema on load if not provided. ([#9108](https://github.com/googleapis/google-cloud-python/pull/9108)) +- Add `max_results` parameter to `QueryJob.result()`. ([#9167](https://github.com/googleapis/google-cloud-python/pull/9167)) + +### Documentation +- Fix doc link. ([#9200](https://github.com/googleapis/google-cloud-python/pull/9200)) + +### Internal / Testing Changes +- Revert "Disable failing snippets test ([#9156](https://github.com/googleapis/google-cloud-python/pull/9156))." ([#9220](https://github.com/googleapis/google-cloud-python/pull/9220)) + ## 1.19.0 09-03-2019 14:33 PDT diff --git a/bigquery/docs/reference.rst b/bigquery/docs/reference.rst index e01443808795..981059de5226 100644 --- a/bigquery/docs/reference.rst +++ b/bigquery/docs/reference.rst @@ -83,12 +83,13 @@ Table .. autosummary:: :toctree: generated + table.PartitionRange + table.RangePartitioning + table.Row + table.RowIterator table.Table table.TableListItem table.TableReference - table.Row - table.RowIterator - table.EncryptionConfiguration table.TimePartitioning table.TimePartitioningType @@ -173,6 +174,13 @@ Enums enums.StandardSqlDataTypes +Encryption Configuration +======================== + +.. 
autosummary:: + :toctree: generated + + encryption_configuration.EncryptionConfiguration Additional Types ================ diff --git a/bigquery/docs/snippets.py b/bigquery/docs/snippets.py index 387f35dfac07..83795460a955 100644 --- a/bigquery/docs/snippets.py +++ b/bigquery/docs/snippets.py @@ -13,11 +13,9 @@ # limitations under the License. """Testable usage examples for Google BigQuery API wrapper - Each example function takes a ``client`` argument (which must be an instance of :class:`google.cloud.bigquery.client.Client`) and uses it to perform a task with the API. - To facilitate running the examples as system tests, each example is also passed a ``to_delete`` list; the function adds to the list any objects created which need to be deleted during teardown. @@ -181,7 +179,7 @@ def test_create_table_cmek(client, to_delete): # Set the encryption key to use for the table. # TODO: Replace this key with a key you have created in Cloud KMS. kms_key_name = "projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}".format( - "cloud-samples-tests", "us-central1", "test", "test" + "cloud-samples-tests", "us", "test", "test" ) table.encryption_configuration = bigquery.EncryptionConfiguration( kms_key_name=kms_key_name @@ -303,47 +301,6 @@ def test_load_and_query_partitioned_table(client, to_delete): assert len(rows) == 29 -# [START bigquery_table_exists] -def table_exists(client, table_reference): - """Return if a table exists. - - Args: - client (google.cloud.bigquery.client.Client): - A client to connect to the BigQuery API. - table_reference (google.cloud.bigquery.table.TableReference): - A reference to the table to look for. - - Returns: - bool: ``True`` if the table exists, ``False`` otherwise. 
- """ - from google.cloud.exceptions import NotFound - - try: - client.get_table(table_reference) - return True - except NotFound: - return False - - -# [END bigquery_table_exists] - - -def test_table_exists(client, to_delete): - """Determine if a table exists.""" - DATASET_ID = "get_table_dataset_{}".format(_millis()) - TABLE_ID = "get_table_table_{}".format(_millis()) - dataset = bigquery.Dataset(client.dataset(DATASET_ID)) - dataset = client.create_dataset(dataset) - to_delete.append(dataset) - - table_ref = dataset.table(TABLE_ID) - table = bigquery.Table(table_ref, schema=SCHEMA) - table = client.create_table(table) - - assert table_exists(client, table_ref) - assert not table_exists(client, dataset.table("i_dont_exist")) - - @pytest.mark.skip( reason=( "update_table() is flaky " @@ -543,7 +500,7 @@ def test_update_table_cmek(client, to_delete): table = bigquery.Table(dataset.table(table_id)) original_kms_key_name = "projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}".format( - "cloud-samples-tests", "us-central1", "test", "test" + "cloud-samples-tests", "us", "test", "test" ) table.encryption_configuration = bigquery.EncryptionConfiguration( kms_key_name=original_kms_key_name @@ -559,8 +516,7 @@ def test_update_table_cmek(client, to_delete): # Set a new encryption key to use for the destination. # TODO: Replace this key with a key you have created in KMS. 
updated_kms_key_name = ( - "projects/cloud-samples-tests/locations/us-central1/" - "keyRings/test/cryptoKeys/otherkey" + "projects/cloud-samples-tests/locations/us/keyRings/test/cryptoKeys/otherkey" ) table.encryption_configuration = bigquery.EncryptionConfiguration( kms_key_name=updated_kms_key_name @@ -698,36 +654,6 @@ def test_manage_views(client, to_delete): # [END bigquery_grant_view_access] -def test_table_insert_rows(client, to_delete): - """Insert / fetch table data.""" - dataset_id = "table_insert_rows_dataset_{}".format(_millis()) - table_id = "table_insert_rows_table_{}".format(_millis()) - dataset = bigquery.Dataset(client.dataset(dataset_id)) - dataset = client.create_dataset(dataset) - dataset.location = "US" - to_delete.append(dataset) - - table = bigquery.Table(dataset.table(table_id), schema=SCHEMA) - table = client.create_table(table) - - # [START bigquery_table_insert_rows] - # TODO(developer): Uncomment the lines below and replace with your values. - # from google.cloud import bigquery - # client = bigquery.Client() - # dataset_id = 'my_dataset' # replace with your dataset ID - # For this sample, the table must already exist and have a defined schema - # table_id = 'my_table' # replace with your table ID - # table_ref = client.dataset(dataset_id).table(table_id) - # table = client.get_table(table_ref) # API request - - rows_to_insert = [(u"Phred Phlyntstone", 32), (u"Wylma Phlyntstone", 29)] - - errors = client.insert_rows(table, rows_to_insert) # API request - - assert errors == [] - # [END bigquery_table_insert_rows] - - def test_load_table_from_file(client, to_delete): """Upload table data from a CSV file.""" dataset_id = "load_table_from_file_dataset_{}".format(_millis()) @@ -904,7 +830,7 @@ def test_load_table_from_uri_cmek(client, to_delete): # Set the encryption key to use for the destination. # TODO: Replace this key with a key you have created in KMS. 
kms_key_name = "projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}".format( - "cloud-samples-tests", "us-central1", "test", "test" + "cloud-samples-tests", "us", "test", "test" ) encryption_config = bigquery.EncryptionConfiguration(kms_key_name=kms_key_name) job_config.destination_encryption_configuration = encryption_config @@ -993,12 +919,10 @@ def test_load_table_from_uri_orc(client, to_delete, capsys): def test_load_table_from_uri_autodetect(client, to_delete, capsys): """Load table from a GCS URI using various formats and auto-detected schema - Each file format has its own tested load from URI sample. Because most of the code is common for autodetect, append, and truncate, this sample includes snippets for all supported formats but only calls a single load job. - This code snippet is made up of shared code, then format-specific code, followed by more shared code. Note that only the last format in the format-specific code section will be tested in this test. @@ -1058,12 +982,10 @@ def test_load_table_from_uri_autodetect(client, to_delete, capsys): def test_load_table_from_uri_truncate(client, to_delete, capsys): """Replaces table data with data from a GCS URI using various formats - Each file format has its own tested load from URI sample. Because most of the code is common for autodetect, append, and truncate, this sample includes snippets for all supported formats but only calls a single load job. - This code snippet is made up of shared code, then format-specific code, followed by more shared code. Note that only the last format in the format-specific code section will be tested in this test. 
@@ -1303,38 +1225,6 @@ def test_load_table_relax_column(client, to_delete): assert table.num_rows > 0 -def test_copy_table(client, to_delete): - dataset_id = "copy_table_dataset_{}".format(_millis()) - dest_dataset = bigquery.Dataset(client.dataset(dataset_id)) - dest_dataset.location = "US" - dest_dataset = client.create_dataset(dest_dataset) - to_delete.append(dest_dataset) - - # [START bigquery_copy_table] - # from google.cloud import bigquery - # client = bigquery.Client() - - source_dataset = client.dataset("samples", project="bigquery-public-data") - source_table_ref = source_dataset.table("shakespeare") - - # dataset_id = 'my_dataset' - dest_table_ref = client.dataset(dataset_id).table("destination_table") - - job = client.copy_table( - source_table_ref, - dest_table_ref, - # Location must match that of the source and destination tables. - location="US", - ) # API request - - job.result() # Waits for job to complete. - - assert job.state == "DONE" - dest_table = client.get_table(dest_table_ref) # API request - assert dest_table.num_rows > 0 - # [END bigquery_copy_table] - - def test_copy_table_multiple_source(client, to_delete): dest_dataset_id = "dest_dataset_{}".format(_millis()) dest_dataset = bigquery.Dataset(client.dataset(dest_dataset_id)) @@ -1393,7 +1283,6 @@ def test_copy_table_multiple_source(client, to_delete): assert dest_table.num_rows == 2 -@pytest.mark.skip(reason="Backend responds with a 500 internal error.") def test_copy_table_cmek(client, to_delete): dataset_id = "copy_table_cmek_{}".format(_millis()) dest_dataset = bigquery.Dataset(client.dataset(dataset_id)) @@ -1415,7 +1304,7 @@ def test_copy_table_cmek(client, to_delete): # Set the encryption key to use for the destination. # TODO: Replace this key with a key you have created in KMS. 
kms_key_name = "projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}".format( - "cloud-samples-tests", "us-central1", "test", "test" + "cloud-samples-tests", "us", "test", "test" ) encryption_config = bigquery.EncryptionConfiguration(kms_key_name=kms_key_name) job_config = bigquery.CopyJobConfig() @@ -1602,31 +1491,6 @@ def test_undelete_table(client, to_delete): # [END bigquery_undelete_table] -def test_client_query(client): - """Run a simple query.""" - - # [START bigquery_query] - # from google.cloud import bigquery - # client = bigquery.Client() - - query = ( - "SELECT name FROM `bigquery-public-data.usa_names.usa_1910_2013` " - 'WHERE state = "TX" ' - "LIMIT 100" - ) - query_job = client.query( - query, - # Location must match that of the dataset(s) referenced in the query. - location="US", - ) # API request - starts the query - - for row in query_job: # API request - fetches results - # Row values can be accessed by field name or index - assert row[0] == row.name == row["name"] - print(row) - # [END bigquery_query] - - def test_client_query_legacy_sql(client): """Run a query with Legacy SQL explicitly set""" # [START bigquery_query_legacy] @@ -1820,7 +1684,7 @@ def test_client_query_destination_table_cmek(client, to_delete): # Set the encryption key to use for the destination. # TODO: Replace this key with a key you have created in KMS. 
kms_key_name = "projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}".format( - "cloud-samples-tests", "us-central1", "test", "test" + "cloud-samples-tests", "us", "test", "test" ) encryption_config = bigquery.EncryptionConfiguration(kms_key_name=kms_key_name) job_config.destination_encryption_configuration = encryption_config @@ -2304,108 +2168,6 @@ def test_query_external_gcs_permanent_table(client, to_delete): assert len(w_states) == 4 -def test_query_external_sheets_temporary_table(client): - # [START bigquery_query_external_sheets_temp] - # [START bigquery_auth_drive_scope] - import google.auth - - # from google.cloud import bigquery - - # Create credentials with Drive & BigQuery API scopes - # Both APIs must be enabled for your project before running this code - credentials, project = google.auth.default( - scopes=[ - "https://www.googleapis.com/auth/drive", - "https://www.googleapis.com/auth/bigquery", - ] - ) - client = bigquery.Client(credentials=credentials, project=project) - # [END bigquery_auth_drive_scope] - - # Configure the external data source and query job - external_config = bigquery.ExternalConfig("GOOGLE_SHEETS") - # Use a shareable link or grant viewing access to the email address you - # used to authenticate with BigQuery (this example Sheet is public) - sheet_url = ( - "https://docs.google.com/spreadsheets" - "/d/1i_QCL-7HcSyUZmIbP9E6lO_T5u3HnpLe7dnpHaijg_E/edit?usp=sharing" - ) - external_config.source_uris = [sheet_url] - external_config.schema = [ - bigquery.SchemaField("name", "STRING"), - bigquery.SchemaField("post_abbr", "STRING"), - ] - external_config.options.skip_leading_rows = 1 # optionally skip header row - table_id = "us_states" - job_config = bigquery.QueryJobConfig() - job_config.table_definitions = {table_id: external_config} - - # Example query to find states starting with 'W' - sql = 'SELECT * FROM `{}` WHERE name LIKE "W%"'.format(table_id) - - query_job = client.query(sql, job_config=job_config) # API request - - w_states = 
list(query_job) # Waits for query to finish - print("There are {} states with names starting with W.".format(len(w_states))) - # [END bigquery_query_external_sheets_temp] - assert len(w_states) == 4 - - -def test_query_external_sheets_permanent_table(client, to_delete): - dataset_id = "query_external_sheets_{}".format(_millis()) - dataset = bigquery.Dataset(client.dataset(dataset_id)) - client.create_dataset(dataset) - to_delete.append(dataset) - - # [START bigquery_query_external_sheets_perm] - import google.auth - - # from google.cloud import bigquery - # dataset_id = 'my_dataset' - - # Create credentials with Drive & BigQuery API scopes - # Both APIs must be enabled for your project before running this code - credentials, project = google.auth.default( - scopes=[ - "https://www.googleapis.com/auth/drive", - "https://www.googleapis.com/auth/bigquery", - ] - ) - client = bigquery.Client(credentials=credentials, project=project) - - # Configure the external data source - dataset_ref = client.dataset(dataset_id) - table_id = "us_states" - schema = [ - bigquery.SchemaField("name", "STRING"), - bigquery.SchemaField("post_abbr", "STRING"), - ] - table = bigquery.Table(dataset_ref.table(table_id), schema=schema) - external_config = bigquery.ExternalConfig("GOOGLE_SHEETS") - # Use a shareable link or grant viewing access to the email address you - # used to authenticate with BigQuery (this example Sheet is public) - sheet_url = ( - "https://docs.google.com/spreadsheets" - "/d/1i_QCL-7HcSyUZmIbP9E6lO_T5u3HnpLe7dnpHaijg_E/edit?usp=sharing" - ) - external_config.source_uris = [sheet_url] - external_config.options.skip_leading_rows = 1 # optionally skip header row - table.external_data_configuration = external_config - - # Create a permanent table linked to the Sheets file - table = client.create_table(table) # API request - - # Example query to find states starting with 'W' - sql = 'SELECT * FROM `{}.{}` WHERE name LIKE "W%"'.format(dataset_id, table_id) - - query_job = 
client.query(sql) # API request - - w_states = list(query_job) # Waits for query to finish - print("There are {} states with names starting with W.".format(len(w_states))) - # [END bigquery_query_external_sheets_perm] - assert len(w_states) == 4 - - def test_ddl_create_view(client, to_delete, capsys): """Create a view via a DDL query.""" project = client.project @@ -2463,42 +2225,6 @@ def test_ddl_create_view(client, to_delete, capsys): assert len(df) == 0 -def test_client_list_jobs(client): - """List jobs for a project.""" - - # [START bigquery_list_jobs] - # TODO(developer): Uncomment the lines below and replace with your values. - # from google.cloud import bigquery - # project = 'my_project' # replace with your project ID - # client = bigquery.Client(project=project) - import datetime - - # List the 10 most recent jobs in reverse chronological order. - # Omit the max_results parameter to list jobs from the past 6 months. - print("Last 10 jobs:") - for job in client.list_jobs(max_results=10): # API request(s) - print(job.job_id) - - # The following are examples of additional optional parameters: - - # Use min_creation_time and/or max_creation_time to specify a time window. - print("Jobs from the last ten minutes:") - ten_mins_ago = datetime.datetime.utcnow() - datetime.timedelta(minutes=10) - for job in client.list_jobs(min_creation_time=ten_mins_ago): - print(job.job_id) - - # Use all_users to include jobs run by all users in the project. - print("Last 10 jobs run by all users:") - for job in client.list_jobs(max_results=10, all_users=True): - print("{} run by user: {}".format(job.job_id, job.user_email)) - - # Use state_filter to filter by job state. 
- print("Jobs currently running:") - for job in client.list_jobs(state_filter="RUNNING"): - print(job.job_id) - # [END bigquery_list_jobs] - - @pytest.mark.skipif(pandas is None, reason="Requires `pandas`") def test_query_results_as_dataframe(client): # [START bigquery_query_results_dataframe] diff --git a/bigquery/docs/usage/jobs.rst b/bigquery/docs/usage/jobs.rst index 914d1d459ee7..c3dd71031bfc 100644 --- a/bigquery/docs/usage/jobs.rst +++ b/bigquery/docs/usage/jobs.rst @@ -1,9 +1,6 @@ Managing Jobs ~~~~~~~~~~~~~ -List jobs for a project -^^^^^^^^^^^^^^^^^^^^^^^ - Jobs describe actions performed on data in BigQuery tables: - Load data into a table @@ -11,7 +8,13 @@ Jobs describe actions performed on data in BigQuery tables: - Extract data from a table - Copy a table -.. literalinclude:: ../snippets.py +Listing jobs +^^^^^^^^^^^^ + +List jobs for a project with the +:func:`~google.cloud.bigquery.client.Client.list_jobs` method: + +.. literalinclude:: ../samples/client_list_jobs.py :language: python :dedent: 4 :start-after: [START bigquery_list_jobs] diff --git a/bigquery/docs/usage/queries.rst b/bigquery/docs/usage/queries.rst index fc77bb5b80cd..5c9dbe18fa63 100644 --- a/bigquery/docs/usage/queries.rst +++ b/bigquery/docs/usage/queries.rst @@ -4,9 +4,10 @@ Running Queries Querying data ^^^^^^^^^^^^^ -Run a query and wait for it to finish: +Run a query and wait for it to finish with the +:func:`~google.cloud.bigquery.client.Client.query` method: -.. literalinclude:: ../snippets.py +.. literalinclude:: ../samples/client_query.py :language: python :dedent: 4 :start-after: [START bigquery_query] @@ -47,3 +48,16 @@ See BigQuery documentation for more information on :dedent: 4 :start-after: [START bigquery_query_params_named] :end-before: [END bigquery_query_params_named] + +Run a script +^^^^^^^^^^^^ + +See BigQuery documentation for more information on `scripting in BigQuery +standard SQL +`_. + +.. 
literalinclude:: ../samples/query_script.py + :language: python + :dedent: 4 + :start-after: [START bigquery_query_script] + :end-before: [END bigquery_query_script] diff --git a/bigquery/docs/usage/tables.rst b/bigquery/docs/usage/tables.rst index 458c5b0009ba..20ed79a969f2 100644 --- a/bigquery/docs/usage/tables.rst +++ b/bigquery/docs/usage/tables.rst @@ -28,6 +28,15 @@ Get a table resource with the :start-after: [START bigquery_get_table] :end-before: [END bigquery_get_table] +Determine if a table exists with the +:func:`~google.cloud.bigquery.client.Client.get_table` method: + +.. literalinclude:: ../samples/table_exists.py + :language: python + :dedent: 4 + :start-after: [START bigquery_table_exists] + :end-before: [END bigquery_table_exists] + Browse data rows in a table with the :func:`~google.cloud.bigquery.client.Client.list_rows` method: @@ -49,6 +58,15 @@ Create an empty table with the :start-after: [START bigquery_create_table] :end-before: [END bigquery_create_table] +Create an integer range partitioned table with the +:func:`~google.cloud.bigquery.client.Client.create_table` method: + +.. literalinclude:: ../samples/create_table_range_partitioned.py + :language: python + :dedent: 4 + :start-after: [START bigquery_create_table_range_partitioned] + :end-before: [END bigquery_create_table_range_partitioned] + Load table data from a file with the :func:`~google.cloud.bigquery.client.Client.load_table_from_file` method: @@ -107,12 +125,26 @@ Update a property in a table's metadata with the Insert rows into a table's data with the :func:`~google.cloud.bigquery.client.Client.insert_rows` method: -.. literalinclude:: ../snippets.py +.. literalinclude:: ../samples/table_insert_rows.py :language: python :dedent: 4 :start-after: [START bigquery_table_insert_rows] :end-before: [END bigquery_table_insert_rows] +Insert rows into a table's data with the +:func:`~google.cloud.bigquery.client.Client.insert_rows` method, achieving +higher write limit: + +.. 
literalinclude:: ../samples/table_insert_rows_explicit_none_insert_ids.py + :language: python + :dedent: 4 + :start-after: [START bigquery_table_insert_rows_explicit_none_insert_ids] + :end-before: [END bigquery_table_insert_rows_explicit_none_insert_ids] + +Mind that inserting data with ``None`` row insert IDs can come at the expense of +more duplicate inserts. See also: +`Streaming inserts `_. + Add an empty column to the existing table with the :func:`~google.cloud.bigquery.update_table` method: @@ -128,7 +160,7 @@ Copying a Table Copy a table with the :func:`~google.cloud.bigquery.client.Client.copy_table` method: -.. literalinclude:: ../snippets.py +.. literalinclude:: ../samples/copy_table.py :language: python :dedent: 4 :start-after: [START bigquery_copy_table] diff --git a/bigquery/google/cloud/bigquery/__init__.py b/bigquery/google/cloud/bigquery/__init__.py index bda8c5611435..3982c1175850 100644 --- a/bigquery/google/cloud/bigquery/__init__.py +++ b/bigquery/google/cloud/bigquery/__init__.py @@ -73,12 +73,14 @@ from google.cloud.bigquery.routine import RoutineArgument from google.cloud.bigquery.routine import RoutineReference from google.cloud.bigquery.schema import SchemaField -from google.cloud.bigquery.table import EncryptionConfiguration +from google.cloud.bigquery.table import PartitionRange +from google.cloud.bigquery.table import RangePartitioning +from google.cloud.bigquery.table import Row from google.cloud.bigquery.table import Table from google.cloud.bigquery.table import TableReference -from google.cloud.bigquery.table import Row from google.cloud.bigquery.table import TimePartitioningType from google.cloud.bigquery.table import TimePartitioning +from google.cloud.bigquery.encryption_configuration import EncryptionConfiguration __all__ = [ "__version__", @@ -94,10 +96,14 @@ "DatasetReference", "AccessEntry", # Tables - "EncryptionConfiguration", "Table", "TableReference", + "PartitionRange", + "RangePartitioning", "Row", + "TimePartitioning", 
+ "TimePartitioningType", + # Jobs "CopyJob", "CopyJobConfig", "ExtractJob", @@ -105,8 +111,6 @@ "LoadJob", "LoadJobConfig", "UnknownJob", - "TimePartitioningType", - "TimePartitioning", # Models "Model", "ModelReference", @@ -136,6 +140,8 @@ "StandardSqlDataTypes", "SourceFormat", "WriteDisposition", + # EncryptionConfiguration + "EncryptionConfiguration", ] diff --git a/bigquery/google/cloud/bigquery/_helpers.py b/bigquery/google/cloud/bigquery/_helpers.py index bb3998732a5a..98eadb0a2f8e 100644 --- a/bigquery/google/cloud/bigquery/_helpers.py +++ b/bigquery/google/cloud/bigquery/_helpers.py @@ -18,6 +18,7 @@ import copy import datetime import decimal +import re from google.cloud._helpers import UTC from google.cloud._helpers import _date_from_iso8601_date @@ -29,6 +30,12 @@ _RFC3339_MICROS_NO_ZULU = "%Y-%m-%dT%H:%M:%S.%f" _TIMEONLY_WO_MICROS = "%H:%M:%S" _TIMEONLY_W_MICROS = "%H:%M:%S.%f" +_PROJECT_PREFIX_PATTERN = re.compile( + r""" + (?P\S+\:[^.]+)\.(?P[^.]+)(?:$|\.(?P[^.]+)$) +""", + re.VERBOSE, +) def _not_null(value, field): @@ -83,12 +90,15 @@ def _timestamp_query_param_from_json(value, field): Args: value (str): The timestamp. - field (.SchemaField): The field corresponding to the value. + + field (google.cloud.bigquery.schema.SchemaField): + The field corresponding to the value. Returns: - Optional[datetime.datetime]: The parsed datetime object from - ``value`` if the ``field`` is not null (otherwise it is - :data:`None`). + Optional[datetime.datetime]: + The parsed datetime object from + ``value`` if the ``field`` is not null (otherwise it is + :data:`None`). """ if _not_null(value, field): # Canonical formats for timestamps in BigQuery are flexible. See: @@ -118,12 +128,14 @@ def _datetime_from_json(value, field): Args: value (str): The timestamp. - field (.SchemaField): The field corresponding to the value. + field (google.cloud.bigquery.schema.SchemaField): + The field corresponding to the value. 
Returns: - Optional[datetime.datetime]: The parsed datetime object from - ``value`` if the ``field`` is not null (otherwise it is - :data:`None`). + Optional[datetime.datetime]: + The parsed datetime object from + ``value`` if the ``field`` is not null (otherwise it is + :data:`None`). """ if _not_null(value, field): if "." in value: @@ -210,16 +222,20 @@ def _row_tuple_from_json(row, schema): Note: ``row['f']`` and ``schema`` are presumed to be of the same length. - :type row: dict - :param row: A JSON response row to be converted. - - :type schema: tuple - :param schema: A tuple of - :class:`~google.cloud.bigquery.schema.SchemaField`. + Args: + row (Dict): A JSON response row to be converted. + schema (Sequence[Union[ \ + :class:`~google.cloud.bigquery.schema.SchemaField`, \ + Mapping[str, Any] \ + ]]): Specification of the field types in ``row``. - :rtype: tuple - :returns: A tuple of data converted to native types. + Returns: + Tuple: A tuple of data converted to native types. """ + from google.cloud.bigquery.schema import _to_schema_fields + + schema = _to_schema_fields(schema) + row_data = [] for field, cell in zip(schema, row["f"]): row_data.append(_field_from_json(cell["v"], field)) @@ -227,9 +243,25 @@ def _row_tuple_from_json(row, schema): def _rows_from_json(values, schema): - """Convert JSON row data to rows with appropriate types.""" + """Convert JSON row data to rows with appropriate types. + + Args: + values (Sequence[Dict]): The list of responses (JSON rows) to convert. + schema (Sequence[Union[ \ + :class:`~google.cloud.bigquery.schema.SchemaField`, \ + Mapping[str, Any] \ + ]]): + The table's schema. If any item is a mapping, its content must be + compatible with + :meth:`~google.cloud.bigquery.schema.SchemaField.from_api_repr`. 
+ + Returns: + List[:class:`~google.cloud.bigquery.Row`] + """ from google.cloud.bigquery import Row + from google.cloud.bigquery.schema import _to_schema_fields + schema = _to_schema_fields(schema) field_to_index = _field_to_index_mapping(schema) return [Row(_row_tuple_from_json(r, schema), field_to_index) for r in values] @@ -337,16 +369,13 @@ def _scalar_field_to_json(field, row_value): """Maps a field and value to a JSON-safe value. Args: - field ( \ - :class:`~google.cloud.bigquery.schema.SchemaField`, \ - ): + field (google.cloud.bigquery.schema.SchemaField): The SchemaField to use for type conversion and field name. - row_value (any): + row_value (Any): Value to be converted, based on the field's type. Returns: - any: - A JSON-serializable object. + Any: A JSON-serializable object. """ converter = _SCALAR_VALUE_TO_JSON_ROW.get(field.field_type) if converter is None: # STRING doesn't need converting @@ -358,17 +387,14 @@ def _repeated_field_to_json(field, row_value): """Convert a repeated/array field to its JSON representation. Args: - field ( \ - :class:`~google.cloud.bigquery.schema.SchemaField`, \ - ): + field (google.cloud.bigquery.schema.SchemaField): The SchemaField to use for type conversion and field name. The field mode must equal ``REPEATED``. - row_value (Sequence[any]): + row_value (Sequence[Any]): A sequence of values to convert to JSON-serializable values. Returns: - List[any]: - A list of JSON-serializable objects. + List[Any]: A list of JSON-serializable objects. """ # Remove the REPEATED, but keep the other fields. This allows us to process # each item as if it were a top-level field. @@ -384,17 +410,14 @@ def _record_field_to_json(fields, row_value): """Convert a record/struct field to its JSON representation. 
Args: - fields ( \ - Sequence[:class:`~google.cloud.bigquery.schema.SchemaField`], \ - ): + fields (Sequence[google.cloud.bigquery.schema.SchemaField]): The :class:`~google.cloud.bigquery.schema.SchemaField`s of the record's subfields to use for type conversion and field names. row_value (Union[Tuple[Any], Mapping[str, Any]): A tuple or dictionary to convert to JSON-serializable values. Returns: - Mapping[str, any]: - A JSON-serializable dictionary. + Mapping[str, Any]: A JSON-serializable dictionary. """ record = {} isdict = isinstance(row_value, dict) @@ -413,22 +436,16 @@ def _field_to_json(field, row_value): """Convert a field into JSON-serializable values. Args: - field ( \ - :class:`~google.cloud.bigquery.schema.SchemaField`, \ - ): + field (google.cloud.bigquery.schema.SchemaField): The SchemaField to use for type conversion and field name. - row_value (Union[ \ - Sequence[list], \ - any, \ - ]): + row_value (Union[Sequence[List], Any]): Row data to be inserted. If the SchemaField's mode is REPEATED, assume this is a list. If not, the type is inferred from the SchemaField's field_type. Returns: - any: - A JSON-serializable object. + Any: A JSON-serializable object. """ if row_value is None: return None @@ -454,9 +471,9 @@ def _get_sub_prop(container, keys, default=None): This method works like ``dict.get(key)``, but for nested values. Arguments: - container (dict): + container (Dict): A dictionary which may contain other dictionaries as values. - keys (iterable): + keys (Iterable): A sequence of keys to attempt to get the value for. Each item in the sequence represents a deeper nesting. The first key is for the top level. If there is a dictionary there, the second key @@ -497,9 +514,9 @@ def _set_sub_prop(container, keys, value): """Set a nested value in a dictionary. Arguments: - container (dict): + container (Dict): A dictionary which may contain other dictionaries as values. 
- keys (iterable): + keys (Iterable): A sequence of keys to attempt to set the value for. Each item in the sequence represents a deeper nesting. The first key is for the top level. If there is a dictionary there, the second key @@ -540,9 +557,9 @@ def _del_sub_prop(container, keys): """Remove a nested key fro a dictionary. Arguments: - container (dict): + container (Dict): A dictionary which may contain other dictionaries as values. - keys (iterable): + keys (Iterable): A sequence of keys to attempt to clear the value for. Each item in the sequence represents a deeper nesting. The first key is for the top level. If there is a dictionary there, the second key @@ -586,24 +603,42 @@ def _str_or_none(value): return str(value) +def _split_id(full_id): + """Helper: split full_id into composite parts. + + Args: + full_id (str): Fully-qualified ID in standard SQL format. + + Returns: + List[str]: ID's parts separated into components. + """ + with_prefix = _PROJECT_PREFIX_PATTERN.match(full_id) + if with_prefix is None: + parts = full_id.split(".") + else: + parts = with_prefix.groups() + parts = [part for part in parts if part] + return parts + + def _parse_3_part_id(full_id, default_project=None, property_name="table_id"): output_project_id = default_project output_dataset_id = None output_resource_id = None - parts = full_id.split(".") + parts = _split_id(full_id) if len(parts) != 2 and len(parts) != 3: raise ValueError( "{property_name} must be a fully-qualified ID in " - 'standard SQL format. e.g. "project.dataset.{property_name}", ' + 'standard SQL format, e.g., "project.dataset.{property_name}", ' "got {}".format(full_id, property_name=property_name) ) if len(parts) == 2 and not default_project: raise ValueError( "When default_project is not set, {property_name} must be a " - "fully-qualified ID in standard SQL format. " - 'e.g. 
"project.dataset_id.{property_name}", got {}'.format( + "fully-qualified ID in standard SQL format, " + 'e.g., "project.dataset_id.{property_name}", got {}'.format( full_id, property_name=property_name ) ) @@ -633,3 +668,18 @@ def _build_resource_from_properties(obj, filter_fields): partial[filter_field] = obj._properties[filter_field] return partial + + +def _verify_job_config_type(job_config, expected_type, param_name="job_config"): + if not isinstance(job_config, expected_type): + msg = ( + "Expected an instance of {expected_type} class for the {param_name} parameter, " + "but received {param_name} = {job_config}" + ) + raise TypeError( + msg.format( + expected_type=expected_type.__name__, + param_name=param_name, + job_config=job_config, + ) + ) diff --git a/bigquery/google/cloud/bigquery/_http.py b/bigquery/google/cloud/bigquery/_http.py index 5dd660ea0b8d..2ff4effefb76 100644 --- a/bigquery/google/cloud/bigquery/_http.py +++ b/bigquery/google/cloud/bigquery/_http.py @@ -22,14 +22,13 @@ class Connection(_http.JSONConnection): """A connection to Google BigQuery via the JSON REST API. - :type client: :class:`~google.cloud.bigquery.client.Client` - :param client: The client that owns the current connection. + Args: + client (google.cloud.bigquery.client.Client): The client that owns the current connection. - :type client_info: :class:`~google.api_core.client_info.ClientInfo` - :param client_info: (Optional) instance used to generate user agent. + client_info (google.api_core.client_info.ClientInfo): (Optional) instance used to generate user agent. 
""" - DEFAULT_API_ENDPOINT = "https://www.googleapis.com" + DEFAULT_API_ENDPOINT = "https://bigquery.googleapis.com" def __init__(self, client, client_info=None, api_endpoint=DEFAULT_API_ENDPOINT): super(Connection, self).__init__(client, client_info) diff --git a/bigquery/google/cloud/bigquery/_pandas_helpers.py b/bigquery/google/cloud/bigquery/_pandas_helpers.py index bfbaf92bbe38..6e91a9624b06 100644 --- a/bigquery/google/cloud/bigquery/_pandas_helpers.py +++ b/bigquery/google/cloud/bigquery/_pandas_helpers.py @@ -110,8 +110,35 @@ def pyarrow_timestamp(): "TIME": pyarrow_time, "TIMESTAMP": pyarrow_timestamp, } + ARROW_SCALAR_IDS_TO_BQ = { + # https://arrow.apache.org/docs/python/api/datatypes.html#type-classes + pyarrow.bool_().id: "BOOL", + pyarrow.int8().id: "INT64", + pyarrow.int16().id: "INT64", + pyarrow.int32().id: "INT64", + pyarrow.int64().id: "INT64", + pyarrow.uint8().id: "INT64", + pyarrow.uint16().id: "INT64", + pyarrow.uint32().id: "INT64", + pyarrow.uint64().id: "INT64", + pyarrow.float16().id: "FLOAT64", + pyarrow.float32().id: "FLOAT64", + pyarrow.float64().id: "FLOAT64", + pyarrow.time32("ms").id: "TIME", + pyarrow.time64("ns").id: "TIME", + pyarrow.timestamp("ns").id: "TIMESTAMP", + pyarrow.date32().id: "DATE", + pyarrow.date64().id: "DATETIME", # because millisecond resolution + pyarrow.binary().id: "BYTES", + pyarrow.string().id: "STRING", # also alias for pyarrow.utf8() + pyarrow.decimal128(38, scale=9).id: "NUMERIC", + # The exact decimal's scale and precision are not important, as only + # the type ID matters, and it's the same for all decimal128 instances. + } + else: # pragma: NO COVER BQ_TO_ARROW_SCALARS = {} # pragma: NO COVER + ARROW_SCALAR_IDS_TO_BQ = {} # pragma: NO_COVER def bq_to_arrow_struct_data_type(field): @@ -130,7 +157,8 @@ def bq_to_arrow_struct_data_type(field): def bq_to_arrow_data_type(field): """Return the Arrow data type, corresponding to a given BigQuery column. 
- Returns None if default Arrow type inspection should be used. + Returns: + None: if default Arrow type inspection should be used. """ if field.mode is not None and field.mode.upper() == "REPEATED": inner_type = bq_to_arrow_data_type( @@ -140,10 +168,11 @@ def bq_to_arrow_data_type(field): return pyarrow.list_(inner_type) return None - if field.field_type.upper() in schema._STRUCT_TYPES: + field_type_upper = field.field_type.upper() if field.field_type else "" + if field_type_upper in schema._STRUCT_TYPES: return bq_to_arrow_struct_data_type(field) - data_type_constructor = BQ_TO_ARROW_SCALARS.get(field.field_type.upper()) + data_type_constructor = BQ_TO_ARROW_SCALARS.get(field_type_upper) if data_type_constructor is None: return None return data_type_constructor() @@ -152,7 +181,8 @@ def bq_to_arrow_data_type(field): def bq_to_arrow_field(bq_field): """Return the Arrow field, corresponding to a given BigQuery column. - Returns None if the Arrow type cannot be determined. + Returns: + None: if the Arrow type cannot be determined. """ arrow_type = bq_to_arrow_data_type(bq_field) if arrow_type: @@ -166,7 +196,8 @@ def bq_to_arrow_field(bq_field): def bq_to_arrow_schema(bq_schema): """Return the Arrow schema, corresponding to a given BigQuery schema. - Returns None if any Arrow type cannot be determined. + Returns: + None: if any Arrow type cannot be determined. 
""" arrow_fields = [] for bq_field in bq_schema: @@ -180,9 +211,12 @@ def bq_to_arrow_schema(bq_schema): def bq_to_arrow_array(series, bq_field): arrow_type = bq_to_arrow_data_type(bq_field) + + field_type_upper = bq_field.field_type.upper() if bq_field.field_type else "" + if bq_field.mode.upper() == "REPEATED": return pyarrow.ListArray.from_pandas(series, type=arrow_type) - if bq_field.field_type.upper() in schema._STRUCT_TYPES: + if field_type_upper in schema._STRUCT_TYPES: return pyarrow.StructArray.from_pandas(series, type=arrow_type) return pyarrow.array(series, type=arrow_type) @@ -236,7 +270,10 @@ def dataframe_to_bq_schema(dataframe, bq_schema): Args: dataframe (pandas.DataFrame): DataFrame for which the client determines the BigQuery schema. - bq_schema (Sequence[google.cloud.bigquery.schema.SchemaField]): + bq_schema (Sequence[Union[ \ + :class:`~google.cloud.bigquery.schema.SchemaField`, \ + Mapping[str, Any] \ + ]]): A BigQuery schema. Use this argument to override the autodetected type for some or all of the DataFrame columns. @@ -246,6 +283,7 @@ def dataframe_to_bq_schema(dataframe, bq_schema): any column cannot be determined. """ if bq_schema: + bq_schema = schema._to_schema_fields(bq_schema) for field in bq_schema: if field.field_type in schema._STRUCT_TYPES: raise ValueError( @@ -260,6 +298,8 @@ def dataframe_to_bq_schema(dataframe, bq_schema): bq_schema_unused = set() bq_schema_out = [] + unknown_type_fields = [] + for column, dtype in list_columns_and_indexes(dataframe): # Use provided type from schema, if present. bq_field = bq_schema_index.get(column) @@ -271,12 +311,12 @@ def dataframe_to_bq_schema(dataframe, bq_schema): # Otherwise, try to automatically determine the type based on the # pandas dtype. 
bq_type = _PANDAS_DTYPE_TO_BQ.get(dtype.name) - if not bq_type: - warnings.warn(u"Unable to determine type of column '{}'.".format(column)) - return None bq_field = schema.SchemaField(column, bq_type) bq_schema_out.append(bq_field) + if bq_field.field_type is None: + unknown_type_fields.append(bq_field) + # Catch any schema mismatch. The developer explicitly asked to serialize a # column, but it was not found. if bq_schema_unused: @@ -285,7 +325,73 @@ def dataframe_to_bq_schema(dataframe, bq_schema): bq_schema_unused ) ) - return tuple(bq_schema_out) + + # If schema detection was not successful for all columns, also try with + # pyarrow, if available. + if unknown_type_fields: + if not pyarrow: + msg = u"Could not determine the type of columns: {}".format( + ", ".join(field.name for field in unknown_type_fields) + ) + warnings.warn(msg) + return None # We cannot detect the schema in full. + + # The augment_schema() helper itself will also issue unknown type + # warnings if detection still fails for any of the fields. + bq_schema_out = augment_schema(dataframe, bq_schema_out) + + return tuple(bq_schema_out) if bq_schema_out else None + + +def augment_schema(dataframe, current_bq_schema): + """Try to deduce the unknown field types and return an improved schema. + + This function requires ``pyarrow`` to run. If all the missing types still + cannot be detected, ``None`` is returned. If all types are already known, + a shallow copy of the given schema is returned. + + Args: + dataframe (pandas.DataFrame): + DataFrame for which some of the field types are still unknown. + current_bq_schema (Sequence[google.cloud.bigquery.schema.SchemaField]): + A BigQuery schema for ``dataframe``. The types of some or all of + the fields may be ``None``. 
+ Returns: + Optional[Sequence[google.cloud.bigquery.schema.SchemaField]] + """ + augmented_schema = [] + unknown_type_fields = [] + + for field in current_bq_schema: + if field.field_type is not None: + augmented_schema.append(field) + continue + + arrow_table = pyarrow.array(dataframe[field.name]) + detected_type = ARROW_SCALAR_IDS_TO_BQ.get(arrow_table.type.id) + + if detected_type is None: + unknown_type_fields.append(field) + continue + + new_field = schema.SchemaField( + name=field.name, + field_type=detected_type, + mode=field.mode, + description=field.description, + fields=field.fields, + ) + augmented_schema.append(new_field) + + if unknown_type_fields: + warnings.warn( + u"Pyarrow could not determine the type of columns: {}.".format( + ", ".join(field.name for field in unknown_type_fields) + ) + ) + return None + + return augmented_schema def dataframe_to_arrow(dataframe, bq_schema): @@ -294,9 +400,12 @@ def dataframe_to_arrow(dataframe, bq_schema): Args: dataframe (pandas.DataFrame): DataFrame to convert to Arrow table. - bq_schema (Sequence[google.cloud.bigquery.schema.SchemaField]): - Desired BigQuery schema. Number of columns must match number of - columns in the DataFrame. + bq_schema (Sequence[Union[ \ + :class:`~google.cloud.bigquery.schema.SchemaField`, \ + Mapping[str, Any] \ + ]]): + Desired BigQuery schema. The number of columns must match the + number of columns in the DataFrame. Returns: pyarrow.Table: @@ -307,6 +416,8 @@ def dataframe_to_arrow(dataframe, bq_schema): column_and_index_names = set( name for name, _ in list_columns_and_indexes(dataframe) ) + + bq_schema = schema._to_schema_fields(bq_schema) bq_field_names = set(field.name for field in bq_schema) extra_fields = bq_field_names - column_and_index_names @@ -351,7 +462,10 @@ def dataframe_to_parquet(dataframe, bq_schema, filepath, parquet_compression="SN Args: dataframe (pandas.DataFrame): DataFrame to convert to Parquet file. 
- bq_schema (Sequence[google.cloud.bigquery.schema.SchemaField]): + bq_schema (Sequence[Union[ \ + :class:`~google.cloud.bigquery.schema.SchemaField`, \ + Mapping[str, Any] \ + ]]): Desired BigQuery schema. Number of columns must match number of columns in the DataFrame. filepath (str): @@ -365,6 +479,7 @@ def dataframe_to_parquet(dataframe, bq_schema, filepath, parquet_compression="SN if pyarrow is None: raise ValueError("pyarrow is required for BigQuery schema conversion.") + bq_schema = schema._to_schema_fields(bq_schema) arrow_table = dataframe_to_arrow(dataframe, bq_schema) pyarrow.parquet.write_table(arrow_table, filepath, compression=parquet_compression) @@ -380,13 +495,29 @@ def _tabledata_list_page_to_arrow(page, column_names, arrow_types): for column_index, arrow_type in enumerate(arrow_types): arrays.append(pyarrow.array(page._columns[column_index], type=arrow_type)) - return pyarrow.RecordBatch.from_arrays(arrays, column_names) + if isinstance(column_names, pyarrow.Schema): + return pyarrow.RecordBatch.from_arrays(arrays, schema=column_names) + return pyarrow.RecordBatch.from_arrays(arrays, names=column_names) + +def download_arrow_tabledata_list(pages, bq_schema): + """Use tabledata.list to construct an iterable of RecordBatches. -def download_arrow_tabledata_list(pages, schema): - """Use tabledata.list to construct an iterable of RecordBatches.""" - column_names = bq_to_arrow_schema(schema) or [field.name for field in schema] - arrow_types = [bq_to_arrow_data_type(field) for field in schema] + Args: + pages (Iterator[:class:`google.api_core.page_iterator.Page`]): + An iterator over the result pages. + bq_schema (Sequence[Union[ \ + :class:`~google.cloud.bigquery.schema.SchemaField`, \ + Mapping[str, Any] \ + ]]): + A decription of the fields in result pages. + Yields: + :class:`pyarrow.RecordBatch` + The next page of records as a ``pyarrow`` record batch. 
+ """ + bq_schema = schema._to_schema_fields(bq_schema) + column_names = bq_to_arrow_schema(bq_schema) or [field.name for field in bq_schema] + arrow_types = [bq_to_arrow_data_type(field) for field in bq_schema] for page in pages: yield _tabledata_list_page_to_arrow(page, column_names, arrow_types) @@ -407,9 +538,26 @@ def _tabledata_list_page_to_dataframe(page, column_names, dtypes): return pandas.DataFrame(columns, columns=column_names) -def download_dataframe_tabledata_list(pages, schema, dtypes): - """Use (slower, but free) tabledata.list to construct a DataFrame.""" - column_names = [field.name for field in schema] +def download_dataframe_tabledata_list(pages, bq_schema, dtypes): + """Use (slower, but free) tabledata.list to construct a DataFrame. + + Args: + pages (Iterator[:class:`google.api_core.page_iterator.Page`]): + An iterator over the result pages. + bq_schema (Sequence[Union[ \ + :class:`~google.cloud.bigquery.schema.SchemaField`, \ + Mapping[str, Any] \ + ]]): + A decription of the fields in result pages. + dtypes(Mapping[str, numpy.dtype]): + The types of columns in result data to hint construction of the + resulting DataFrame. Not all column types have to be specified. + Yields: + :class:`pandas.DataFrame` + The next page of records as a ``pandas.DataFrame`` record batch. 
+ """ + bq_schema = schema._to_schema_fields(bq_schema) + column_names = [field.name for field in bq_schema] for page in pages: yield _tabledata_list_page_to_dataframe(page, column_names, dtypes) diff --git a/bigquery/google/cloud/bigquery/client.py b/bigquery/google/cloud/bigquery/client.py index c33e119cbc74..bae4359300f8 100644 --- a/bigquery/google/cloud/bigquery/client.py +++ b/bigquery/google/cloud/bigquery/client.py @@ -15,6 +15,7 @@ """Client for interacting with the Google BigQuery API.""" from __future__ import absolute_import +from __future__ import division try: from collections import abc as collections_abc @@ -25,7 +26,9 @@ import functools import gzip import io +import itertools import json +import math import os import tempfile import uuid @@ -50,6 +53,7 @@ from google.cloud.bigquery._helpers import _record_field_to_json from google.cloud.bigquery._helpers import _str_or_none +from google.cloud.bigquery._helpers import _verify_job_config_type from google.cloud.bigquery._http import Connection from google.cloud.bigquery import _pandas_helpers from google.cloud.bigquery.dataset import Dataset @@ -75,7 +79,7 @@ _MAX_MULTIPART_SIZE = 5 * 1024 * 1024 _DEFAULT_NUM_RETRIES = 6 _BASE_UPLOAD_TEMPLATE = ( - u"https://www.googleapis.com/upload/bigquery/v2/projects/" + u"https://bigquery.googleapis.com/upload/bigquery/v2/projects/" u"{project}/jobs?uploadType=" ) _MULTIPART_URL_TEMPLATE = _BASE_UPLOAD_TEMPLATE + u"multipart" @@ -92,14 +96,12 @@ class Project(object): """Wrapper for resource describing a BigQuery project. 
- :type project_id: str - :param project_id: Opaque ID of the project + Args: + project_id (str): Opaque ID of the project - :type numeric_id: int - :param numeric_id: Numeric ID of the project + numeric_id (int): Numeric ID of the project - :type friendly_name: str - :param friendly_name: Display name of the project + friendly_name (str): Display name of the project """ def __init__(self, project_id, numeric_id, friendly_name): @@ -143,7 +145,7 @@ class Client(ClientWithProject): requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own library or partner tool. - client_options (Union[~google.api_core.client_options.ClientOptions, dict]): + client_options (Union[google.api_core.client_options.ClientOptions, Dict]): (Optional) Client options used to set user options on the client. API Endpoint should be set through client_options. @@ -227,25 +229,25 @@ def list_projects(self, max_results=None, page_token=None, retry=DEFAULT_RETRY): See https://cloud.google.com/bigquery/docs/reference/rest/v2/projects/list - :type max_results: int - :param max_results: (Optional) maximum number of projects to return, - If not passed, defaults to a value set by the API. - - :type page_token: str - :param page_token: - (Optional) Token representing a cursor into the projects. If - not passed, the API will return the first page of projects. - The token marks the beginning of the iterator to be returned - and the value of the ``page_token`` can be accessed at - ``next_page_token`` of the - :class:`~google.api_core.page_iterator.HTTPIterator`. - - :type retry: :class:`google.api_core.retry.Retry` - :param retry: (Optional) How to retry the RPC. - - :rtype: :class:`~google.api_core.page_iterator.Iterator` - :returns: Iterator of :class:`~google.cloud.bigquery.client.Project` - accessible to the current client. 
+ Args: + max_results (int): + (Optional) maximum number of projects to return, + If not passed, defaults to a value set by the API. + + page_token (str): + (Optional) Token representing a cursor into the projects. If + not passed, the API will return the first page of projects. + The token marks the beginning of the iterator to be returned + and the value of the ``page_token`` can be accessed at + ``next_page_token`` of the + :class:`~google.api_core.page_iterator.HTTPIterator`. + + retry (google.api_core.retry.Retry): (Optional) How to retry the RPC. + + Returns: + google.api_core.page_iterator.Iterator: + Iterator of :class:`~google.cloud.bigquery.client.Project` + accessible to the current client. """ return page_iterator.HTTPIterator( client=self, @@ -281,7 +283,7 @@ def list_datasets( filter (str): Optional. An expression for filtering the results by label. For syntax, see - https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets/list#filter. + https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets/list#body.QUERY_PARAMETERS.filter max_results (int): Optional. Maximum number of datasets to return. page_token (str): @@ -296,8 +298,7 @@ def list_datasets( Returns: google.api_core.page_iterator.Iterator: - Iterator of - :class:`~google.cloud.bigquery.dataset.DatasetListItem`. + Iterator of :class:`~google.cloud.bigquery.dataset.DatasetListItem`. associated with the project. """ extra_params = {} @@ -324,15 +325,16 @@ def list_datasets( def dataset(self, dataset_id, project=None): """Construct a reference to a dataset. - :type dataset_id: str - :param dataset_id: ID of the dataset. + Args: + dataset_id (str): ID of the dataset. - :type project: str - :param project: (Optional) project ID for the dataset (defaults to - the project of the client). + project (str): + (Optional) project ID for the dataset (defaults to + the project of the client). 
- :rtype: :class:`google.cloud.bigquery.dataset.DatasetReference` - :returns: a new ``DatasetReference`` instance + Returns: + google.cloud.bigquery.dataset.DatasetReference: + a new ``DatasetReference`` instance. """ if project is None: project = self.project @@ -347,8 +349,8 @@ def create_dataset(self, dataset, exists_ok=False, retry=DEFAULT_RETRY): Args: dataset (Union[ \ - :class:`~google.cloud.bigquery.dataset.Dataset`, \ - :class:`~google.cloud.bigquery.dataset.DatasetReference`, \ + google.cloud.bigquery.dataset.Dataset, \ + google.cloud.bigquery.dataset.DatasetReference, \ str, \ ]): A :class:`~google.cloud.bigquery.dataset.Dataset` to create. @@ -400,7 +402,7 @@ def create_routine(self, routine, exists_ok=False, retry=DEFAULT_RETRY): https://cloud.google.com/bigquery/docs/reference/rest/v2/routines/insert Args: - routine (:class:`~google.cloud.bigquery.routine.Routine`): + routine (google.cloud.bigquery.routine.Routine): A :class:`~google.cloud.bigquery.routine.Routine` to create. The dataset that the routine belongs to must already exist. exists_ok (bool): @@ -436,8 +438,8 @@ def create_table(self, table, exists_ok=False, retry=DEFAULT_RETRY): Args: table (Union[ \ - :class:`~google.cloud.bigquery.table.Table`, \ - :class:`~google.cloud.bigquery.table.TableReference`, \ + google.cloud.bigquery.table.Table, \ + google.cloud.bigquery.table.TableReference, \ str, \ ]): A :class:`~google.cloud.bigquery.table.Table` to create. @@ -477,14 +479,14 @@ def get_dataset(self, dataset_ref, retry=DEFAULT_RETRY): Args: dataset_ref (Union[ \ - :class:`~google.cloud.bigquery.dataset.DatasetReference`, \ + google.cloud.bigquery.dataset.DatasetReference, \ str, \ ]): A reference to the dataset to fetch from the BigQuery API. If a string is passed in, this method attempts to create a dataset reference from a string using :func:`~google.cloud.bigquery.dataset.DatasetReference.from_string`. 
- retry (:class:`google.api_core.retry.Retry`): + retry (google.api_core.retry.Retry): (Optional) How to retry the RPC. Returns: @@ -504,19 +506,18 @@ def get_model(self, model_ref, retry=DEFAULT_RETRY): Args: model_ref (Union[ \ - :class:`~google.cloud.bigquery.model.ModelReference`, \ + google.cloud.bigquery.model.ModelReference, \ str, \ ]): A reference to the model to fetch from the BigQuery API. If a string is passed in, this method attempts to create a model reference from a string using :func:`google.cloud.bigquery.model.ModelReference.from_string`. - retry (:class:`google.api_core.retry.Retry`): + retry (google.api_core.retry.Retry): (Optional) How to retry the RPC. Returns: - google.cloud.bigquery.model.Model: - A ``Model`` instance. + google.cloud.bigquery.model.Model: A ``Model`` instance. """ if isinstance(model_ref, str): model_ref = ModelReference.from_string( @@ -531,15 +532,15 @@ def get_routine(self, routine_ref, retry=DEFAULT_RETRY): Args: routine_ref (Union[ \ - :class:`~google.cloud.bigquery.routine.Routine`, \ - :class:`~google.cloud.bigquery.routine.RoutineReference`, \ + google.cloud.bigquery.routine.Routine, \ + google.cloud.bigquery.routine.RoutineReference, \ str, \ ]): A reference to the routine to fetch from the BigQuery API. If a string is passed in, this method attempts to create a reference from a string using :func:`google.cloud.bigquery.routine.RoutineReference.from_string`. - retry (:class:`google.api_core.retry.Retry`): + retry (google.api_core.retry.Retry): (Optional) How to retry the API call. Returns: @@ -559,15 +560,15 @@ def get_table(self, table, retry=DEFAULT_RETRY): Args: table (Union[ \ - :class:`~google.cloud.bigquery.table.Table`, \ - :class:`~google.cloud.bigquery.table.TableReference`, \ + google.cloud.bigquery.table.Table, \ + google.cloud.bigquery.table.TableReference, \ str, \ ]): A reference to the table to fetch from the BigQuery API. 
If a string is passed in, this method attempts to create a table reference from a string using :func:`google.cloud.bigquery.table.TableReference.from_string`. - retry (:class:`google.api_core.retry.Retry`): + retry (google.api_core.retry.Retry): (Optional) How to retry the RPC. Returns: @@ -740,8 +741,8 @@ def list_models( Args: dataset (Union[ \ - :class:`~google.cloud.bigquery.dataset.Dataset`, \ - :class:`~google.cloud.bigquery.dataset.DatasetReference`, \ + google.cloud.bigquery.dataset.Dataset, \ + google.cloud.bigquery.dataset.DatasetReference, \ str, \ ]): A reference to the dataset whose models to list from the @@ -758,7 +759,7 @@ def list_models( the value of the ``page_token`` can be accessed at ``next_page_token`` of the :class:`~google.api_core.page_iterator.HTTPIterator`. - retry (:class:`google.api_core.retry.Retry`): + retry (google.api_core.retry.Retry): (Optional) How to retry the RPC. Returns: @@ -798,8 +799,8 @@ def list_routines( Args: dataset (Union[ \ - :class:`~google.cloud.bigquery.dataset.Dataset`, \ - :class:`~google.cloud.bigquery.dataset.DatasetReference`, \ + google.cloud.bigquery.dataset.Dataset, \ + google.cloud.bigquery.dataset.DatasetReference, \ str, \ ]): A reference to the dataset whose routines to list from the @@ -816,7 +817,7 @@ def list_routines( the value of the ``page_token`` can be accessed at ``next_page_token`` of the :class:`~google.api_core.page_iterator.HTTPIterator`. - retry (:class:`google.api_core.retry.Retry`): + retry (google.api_core.retry.Retry): (Optional) How to retry the RPC. 
Returns: @@ -856,8 +857,8 @@ def list_tables( Args: dataset (Union[ \ - :class:`~google.cloud.bigquery.dataset.Dataset`, \ - :class:`~google.cloud.bigquery.dataset.DatasetReference`, \ + google.cloud.bigquery.dataset.Dataset, \ + google.cloud.bigquery.dataset.DatasetReference, \ str, \ ]): A reference to the dataset whose tables to list from the @@ -874,7 +875,7 @@ def list_tables( the value of the ``page_token`` can be accessed at ``next_page_token`` of the :class:`~google.api_core.page_iterator.HTTPIterator`. - retry (:class:`google.api_core.retry.Retry`): + retry (google.api_core.retry.Retry): (Optional) How to retry the RPC. Returns: @@ -914,8 +915,8 @@ def delete_dataset( Args dataset (Union[ \ - :class:`~google.cloud.bigquery.dataset.Dataset`, \ - :class:`~google.cloud.bigquery.dataset.DatasetReference`, \ + google.cloud.bigquery.dataset.Dataset, \ + google.cloud.bigquery.dataset.DatasetReference, \ str, \ ]): A reference to the dataset to delete. If a string is passed @@ -926,7 +927,7 @@ def delete_dataset( (Optional) If True, delete all the tables in the dataset. If False and the dataset contains tables, the request will fail. Default is False. - retry (:class:`google.api_core.retry.Retry`): + retry (google.api_core.retry.Retry): (Optional) How to retry the RPC. not_found_ok (bool): Defaults to ``False``. If ``True``, ignore "not found" errors @@ -960,15 +961,15 @@ def delete_model(self, model, retry=DEFAULT_RETRY, not_found_ok=False): Args: model (Union[ \ - :class:`~google.cloud.bigquery.model.Model`, \ - :class:`~google.cloud.bigquery.model.ModelReference`, \ + google.cloud.bigquery.model.Model, \ + google.cloud.bigquery.model.ModelReference, \ str, \ ]): A reference to the model to delete. If a string is passed in, this method attempts to create a model reference from a string using :func:`google.cloud.bigquery.model.ModelReference.from_string`. 
- retry (:class:`google.api_core.retry.Retry`): + retry (google.api_core.retry.Retry): (Optional) How to retry the RPC. not_found_ok (bool): Defaults to ``False``. If ``True``, ignore "not found" errors @@ -994,15 +995,15 @@ def delete_routine(self, routine, retry=DEFAULT_RETRY, not_found_ok=False): Args: model (Union[ \ - :class:`~google.cloud.bigquery.routine.Routine`, \ - :class:`~google.cloud.bigquery.routine.RoutineReference`, \ + google.cloud.bigquery.routine.Routine, \ + google.cloud.bigquery.routine.RoutineReference, \ str, \ ]): A reference to the routine to delete. If a string is passed in, this method attempts to create a routine reference from a string using :func:`google.cloud.bigquery.routine.RoutineReference.from_string`. - retry (:class:`google.api_core.retry.Retry`): + retry (google.api_core.retry.Retry): (Optional) How to retry the RPC. not_found_ok (bool): Defaults to ``False``. If ``True``, ignore "not found" errors @@ -1030,15 +1031,15 @@ def delete_table(self, table, retry=DEFAULT_RETRY, not_found_ok=False): Args: table (Union[ \ - :class:`~google.cloud.bigquery.table.Table`, \ - :class:`~google.cloud.bigquery.table.TableReference`, \ + google.cloud.bigquery.table.Table, \ + google.cloud.bigquery.table.TableReference, \ str, \ ]): A reference to the table to delete. If a string is passed in, this method attempts to create a table reference from a string using :func:`google.cloud.bigquery.table.TableReference.from_string`. - retry (:class:`google.api_core.retry.Retry`): + retry (google.api_core.retry.Retry): (Optional) How to retry the RPC. not_found_ok (bool): Defaults to ``False``. If ``True``, ignore "not found" errors @@ -1103,15 +1104,17 @@ def _get_query_results( def job_from_resource(self, resource): """Detect correct job type from resource and instantiate. 
- :type resource: dict - :param resource: one job resource from API response + Args: + resource (Dict): one job resource from API response - :rtype: One of: - :class:`google.cloud.bigquery.job.LoadJob`, - :class:`google.cloud.bigquery.job.CopyJob`, - :class:`google.cloud.bigquery.job.ExtractJob`, - or :class:`google.cloud.bigquery.job.QueryJob` - :returns: the job instance, constructed via the resource + Returns: + Union[ \ + google.cloud.bigquery.job.LoadJob, \ + google.cloud.bigquery.job.CopyJob, \ + google.cloud.bigquery.job.ExtractJob, \ + google.cloud.bigquery.job.QueryJob \ + ]: + The job instance, constructed via the resource. """ config = resource.get("configuration", {}) if "load" in config: @@ -1142,10 +1145,12 @@ def get_job(self, job_id, project=None, location=None, retry=DEFAULT_RETRY): (Optional) How to retry the RPC. Returns: - Union[google.cloud.bigquery.job.LoadJob, \ - google.cloud.bigquery.job.CopyJob, \ - google.cloud.bigquery.job.ExtractJob, \ - google.cloud.bigquery.job.QueryJob]: + Union[ \ + google.cloud.bigquery.job.LoadJob, \ + google.cloud.bigquery.job.CopyJob, \ + google.cloud.bigquery.job.ExtractJob, \ + google.cloud.bigquery.job.QueryJob \ + ]: Job instance, based on the resource returned by the API. """ extra_params = {"projection": "full"} @@ -1173,7 +1178,7 @@ def cancel_job(self, job_id, project=None, location=None, retry=DEFAULT_RETRY): See https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/cancel - Arguments: + Args: job_id (str): Unique job identifier. Keyword Arguments: @@ -1185,10 +1190,12 @@ def cancel_job(self, job_id, project=None, location=None, retry=DEFAULT_RETRY): (Optional) How to retry the RPC. 
Returns: - Union[google.cloud.bigquery.job.LoadJob, \ - google.cloud.bigquery.job.CopyJob, \ - google.cloud.bigquery.job.ExtractJob, \ - google.cloud.bigquery.job.QueryJob]: + Union[ \ + google.cloud.bigquery.job.LoadJob, \ + google.cloud.bigquery.job.CopyJob, \ + google.cloud.bigquery.job.ExtractJob, \ + google.cloud.bigquery.job.QueryJob, \ + ]: Job instance, based on the resource returned by the API. """ extra_params = {"projection": "full"} @@ -1213,6 +1220,7 @@ def cancel_job(self, job_id, project=None, location=None, retry=DEFAULT_RETRY): def list_jobs( self, project=None, + parent_job=None, max_results=None, page_token=None, all_users=None, @@ -1227,33 +1235,38 @@ def list_jobs( https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/list Args: - project (str, optional): + project (Optional[str]): Project ID to use for retreiving datasets. Defaults to the client's project. - max_results (int, optional): + parent_job (Optional[Union[ \ + google.cloud.bigquery.job._AsyncJob, \ + str, \ + ]]): + If set, retrieve only child jobs of the specified parent. + max_results (Optional[int]): Maximum number of jobs to return. - page_token (str, optional): + page_token (Optional[str]): Opaque marker for the next "page" of jobs. If not passed, the API will return the first page of jobs. The token marks the beginning of the iterator to be returned and the value of the ``page_token`` can be accessed at ``next_page_token`` of :class:`~google.api_core.page_iterator.HTTPIterator`. - all_users (bool, optional): + all_users (Optional[bool]): If true, include jobs owned by all users in the project. Defaults to :data:`False`. - state_filter (str, optional): + state_filter (Optional[str]): If set, include only jobs matching the given state. One of: * ``"done"`` * ``"pending"`` * ``"running"`` - retry (google.api_core.retry.Retry, optional): + retry (Optional[google.api_core.retry.Retry]): How to retry the RPC. 
- min_creation_time (datetime.datetime, optional): + min_creation_time (Optional[datetime.datetime]): Min value for job creation time. If set, only jobs created after or at this timestamp are returned. If the datetime has no time zone assumes UTC time. - max_creation_time (datetime.datetime, optional): + max_creation_time (Optional[datetime.datetime]): Max value for job creation time. If set, only jobs created before or at this timestamp are returned. If the datetime has no time zone assumes UTC time. @@ -1262,6 +1275,9 @@ def list_jobs( google.api_core.page_iterator.Iterator: Iterable of job instances. """ + if isinstance(parent_job, job._AsyncJob): + parent_job = parent_job.job_id + extra_params = { "allUsers": all_users, "stateFilter": state_filter, @@ -1272,6 +1288,7 @@ def list_jobs( google.cloud._helpers._millis_from_datetime(max_creation_time) ), "projection": "full", + "parentJobId": parent_job, } extra_params = { @@ -1307,15 +1324,15 @@ def load_table_from_uri( """Starts a job for loading data into a table from CloudStorage. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#jobconfigurationload Arguments: source_uris (Union[str, Sequence[str]]): URIs of data files to be loaded; in format ``gs:///``. destination (Union[ \ - :class:`~google.cloud.bigquery.table.Table`, \ - :class:`~google.cloud.bigquery.table.TableReference`, \ + google.cloud.bigquery.table.Table, \ + google.cloud.bigquery.table.TableReference, \ str, \ ]): Table into which data is to be loaded. If a string is passed @@ -1342,6 +1359,11 @@ def load_table_from_uri( Returns: google.cloud.bigquery.job.LoadJob: A new load job. + + Raises: + TypeError: + If ``job_config`` is not an instance of :class:`~google.cloud.bigquery.job.LoadJobConfig` + class. 
""" job_id = _make_job_id(job_id, job_id_prefix) @@ -1357,6 +1379,10 @@ def load_table_from_uri( source_uris = [source_uris] destination = _table_arg_to_table_ref(destination, default_project=self.project) + + if job_config: + _verify_job_config_type(job_config, google.cloud.bigquery.job.LoadJobConfig) + load_job = job.LoadJob(job_ref, source_uris, destination, self, job_config) load_job._begin(retry=retry) @@ -1383,8 +1409,8 @@ def load_table_from_file( Arguments: file_obj (file): A file handle opened in binary mode for reading. destination (Union[ \ - :class:`~google.cloud.bigquery.table.Table`, \ - :class:`~google.cloud.bigquery.table.TableReference`, \ + google.cloud.bigquery.table.Table, \ + google.cloud.bigquery.table.TableReference, \ str, \ ]): Table into which data is to be loaded. If a string is passed @@ -1423,6 +1449,10 @@ def load_table_from_file( If ``size`` is not passed in and can not be determined, or if the ``file_obj`` can be detected to be a file opened in text mode. + + TypeError: + If ``job_config`` is not an instance of :class:`~google.cloud.bigquery.job.LoadJobConfig` + class. """ job_id = _make_job_id(job_id, job_id_prefix) @@ -1434,6 +1464,8 @@ def load_table_from_file( destination = _table_arg_to_table_ref(destination, default_project=self.project) job_ref = job._JobReference(job_id, project=project, location=location) + if job_config: + _verify_job_config_type(job_config, google.cloud.bigquery.job.LoadJobConfig) load_job = job.LoadJob(job_ref, None, destination, self, job_config) job_resource = load_job.to_api_repr() @@ -1488,19 +1520,19 @@ def load_table_from_dataframe( :func:`google.cloud.bigquery.table.TableReference.from_string`. Keyword Arguments: - num_retries (int, optional): Number of upload retries. - job_id (str, optional): Name of the job. - job_id_prefix (str, optional): + num_retries (Optional[int]): Number of upload retries. + job_id (Optional[str]): Name of the job. 
+ job_id_prefix (Optional[str]): The user-provided prefix for a randomly generated job ID. This parameter will be ignored if a ``job_id`` is also given. location (str): Location where to run the job. Must match the location of the destination table. - project (str, optional): + project (Optional[str]): Project ID of the project of where to run the job. Defaults to the client's project. - job_config (~google.cloud.bigquery.job.LoadJobConfig, optional): + job_config (Optional[google.cloud.bigquery.job.LoadJobConfig]): Extra configuration options for the job. To override the default pandas data type conversions, supply @@ -1532,16 +1564,22 @@ def load_table_from_dataframe( If a usable parquet engine cannot be found. This method requires :mod:`pyarrow` or :mod:`fastparquet` to be installed. + TypeError: + If ``job_config`` is not an instance of :class:`~google.cloud.bigquery.job.LoadJobConfig` + class. """ job_id = _make_job_id(job_id, job_id_prefix) - if job_config is None: - job_config = job.LoadJobConfig() - else: + if job_config: + _verify_job_config_type(job_config, google.cloud.bigquery.job.LoadJobConfig) # Make a copy so that the job config isn't modified in-place. job_config_properties = copy.deepcopy(job_config._properties) job_config = job.LoadJobConfig() job_config._properties = job_config_properties + + else: + job_config = job.LoadJobConfig() + job_config.source_format = job.SourceFormat.PARQUET if location is None: @@ -1572,6 +1610,16 @@ def load_table_from_dataframe( dataframe, job_config.schema ) + if not job_config.schema: + # the schema could not be fully detected + warnings.warn( + "Schema could not be detected for all columns. 
Loading from a " + "dataframe without a schema will be deprecated in the future, " + "please provide a schema.", + PendingDeprecationWarning, + stacklevel=2, + ) + tmpfd, tmppath = tempfile.mkstemp(suffix="_job_{}.parquet".format(job_id[:8])) os.close(tmpfd) @@ -1627,13 +1675,29 @@ def load_table_from_json( ): """Upload the contents of a table from a JSON string or dict. - Arguments: + Args: json_rows (Iterable[Dict[str, Any]]): Row data to be inserted. Keys must match the table schema fields and values must be JSON-compatible representations. + + .. note:: + + If your data is already a newline-delimited JSON string, + it is best to wrap it into a file-like object and pass it + to :meth:`~google.cloud.bigquery.client.Client.load_table_from_file`:: + + import io + from google.cloud import bigquery + + data = u'{"foo": "bar"}' + data_as_file = io.StringIO(data) + + client = bigquery.Client() + client.load_table_from_file(data_as_file, ...) + destination (Union[ \ - :class:`~google.cloud.bigquery.table.Table`, \ - :class:`~google.cloud.bigquery.table.TableReference`, \ + google.cloud.bigquery.table.Table, \ + google.cloud.bigquery.table.TableReference, \ str, \ ]): Table into which data is to be loaded. If a string is passed @@ -1642,7 +1706,7 @@ def load_table_from_json( :func:`google.cloud.bigquery.table.TableReference.from_string`. Keyword Arguments: - num_retries (int, optional): Number of upload retries. + num_retries (Optional[int]): Number of upload retries. job_id (str): (Optional) Name of the job. job_id_prefix (str): (Optional) the user-provided prefix for a randomly generated @@ -1661,14 +1725,21 @@ def load_table_from_json( Returns: google.cloud.bigquery.job.LoadJob: A new load job. + + Raises: + TypeError: + If ``job_config`` is not an instance of :class:`~google.cloud.bigquery.job.LoadJobConfig` + class. 
""" job_id = _make_job_id(job_id, job_id_prefix) - if job_config is None: - job_config = job.LoadJobConfig() - else: + if job_config: + _verify_job_config_type(job_config, google.cloud.bigquery.job.LoadJobConfig) # Make a copy so that the job config isn't modified in-place. job_config = copy.deepcopy(job_config) + else: + job_config = job.LoadJobConfig() + job_config.source_format = job.SourceFormat.NEWLINE_DELIMITED_JSON if job_config.schema is None: @@ -1699,19 +1770,19 @@ def load_table_from_json( def _do_resumable_upload(self, stream, metadata, num_retries): """Perform a resumable upload. - :type stream: IO[bytes] - :param stream: A bytes IO object open for reading. + Args: + stream (IO[bytes]): A bytes IO object open for reading. - :type metadata: dict - :param metadata: The metadata associated with the upload. + metadata (Dict): The metadata associated with the upload. - :type num_retries: int - :param num_retries: Number of upload retries. (Deprecated: This - argument will be removed in a future release.) + num_retries (int): + Number of upload retries. (Deprecated: This + argument will be removed in a future release.) - :rtype: :class:`~requests.Response` - :returns: The "200 OK" response object returned after the final chunk - is uploaded. + Returns: + requests.Response: + The "200 OK" response object returned after the final chunk + is uploaded. """ upload, transport = self._initiate_resumable_upload( stream, metadata, num_retries @@ -1725,23 +1796,22 @@ def _do_resumable_upload(self, stream, metadata, num_retries): def _initiate_resumable_upload(self, stream, metadata, num_retries): """Initiate a resumable upload. - :type stream: IO[bytes] - :param stream: A bytes IO object open for reading. + Args: + stream (IO[bytes]): A bytes IO object open for reading. - :type metadata: dict - :param metadata: The metadata associated with the upload. + metadata (Dict): The metadata associated with the upload. 
- :type num_retries: int - :param num_retries: Number of upload retries. (Deprecated: This - argument will be removed in a future release.) + num_retries (int): + Number of upload retries. (Deprecated: This + argument will be removed in a future release.) - :rtype: tuple - :returns: - Pair of + Returns: + Tuple: + Pair of - * The :class:`~google.resumable_media.requests.ResumableUpload` - that was created - * The ``transport`` used to initiate the upload. + * The :class:`~google.resumable_media.requests.ResumableUpload` + that was created + * The ``transport`` used to initiate the upload. """ chunk_size = _DEFAULT_CHUNKSIZE transport = self._http @@ -1765,26 +1835,29 @@ def _initiate_resumable_upload(self, stream, metadata, num_retries): def _do_multipart_upload(self, stream, metadata, size, num_retries): """Perform a multipart upload. - :type stream: IO[bytes] - :param stream: A bytes IO object open for reading. + Args: + stream (IO[bytes]): A bytes IO object open for reading. - :type metadata: dict - :param metadata: The metadata associated with the upload. + metadata (Dict): The metadata associated with the upload. - :type size: int - :param size: The number of bytes to be uploaded (which will be read - from ``stream``). If not provided, the upload will be - concluded once ``stream`` is exhausted (or :data:`None`). + size (int): + The number of bytes to be uploaded (which will be read + from ``stream``). If not provided, the upload will be + concluded once ``stream`` is exhausted (or :data:`None`). - :type num_retries: int - :param num_retries: Number of upload retries. (Deprecated: This - argument will be removed in a future release.) + num_retries (int): + Number of upload retries. (Deprecated: This + argument will be removed in a future release.) - :rtype: :class:`~requests.Response` - :returns: The "200 OK" response object returned after the multipart - upload request. - :raises: :exc:`ValueError` if the ``stream`` has fewer than ``size`` - bytes remaining. 
+ Returns: + requests.Response: + The "200 OK" response object returned after the multipart + upload request. + + Raises: + ValueError: + if the ``stream`` has fewer than ``size`` + bytes remaining. """ data = stream.read(size) if len(data) < size: @@ -1819,25 +1892,25 @@ def copy_table( """Copy one or more tables to another table. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.copy + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#jobconfigurationtablecopy - Arguments: + Args: sources (Union[ \ - :class:`~google.cloud.bigquery.table.Table`, \ - :class:`~google.cloud.bigquery.table.TableReference`, \ + google.cloud.bigquery.table.Table, \ + google.cloud.bigquery.table.TableReference, \ str, \ Sequence[ \ Union[ \ - :class:`~google.cloud.bigquery.table.Table`, \ - :class:`~google.cloud.bigquery.table.TableReference`, \ + google.cloud.bigquery.table.Table, \ + google.cloud.bigquery.table.TableReference, \ str, \ ] \ ], \ ]): Table or tables to be copied. - destination (Union[ - :class:`~google.cloud.bigquery.table.Table`, \ - :class:`~google.cloud.bigquery.table.TableReference`, \ + destination (Union[ \ + google.cloud.bigquery.table.Table, \ + google.cloud.bigquery.table.TableReference, \ str, \ ]): Table into which data is to be copied. @@ -1861,6 +1934,11 @@ def copy_table( Returns: google.cloud.bigquery.job.CopyJob: A new copy job instance. + + Raises: + TypeError: + If ``job_config`` is not an instance of :class:`~google.cloud.bigquery.job.CopyJobConfig` + class. """ job_id = _make_job_id(job_id, job_id_prefix) @@ -1889,6 +1967,8 @@ def copy_table( destination = _table_arg_to_table_ref(destination, default_project=self.project) + if job_config: + _verify_job_config_type(job_config, google.cloud.bigquery.job.CopyJobConfig) copy_job = job.CopyJob( job_ref, sources, destination, client=self, job_config=job_config ) @@ -1910,12 +1990,12 @@ def extract_table( """Start a job to extract a table into Cloud Storage files. 
See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.extract + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#jobconfigurationextract - Arguments: + Args: source (Union[ \ - :class:`google.cloud.bigquery.table.Table`, \ - :class:`google.cloud.bigquery.table.TableReference`, \ + google.cloud.bigquery.table.Table, \ + google.cloud.bigquery.table.TableReference, \ src, \ ]): Table to be extracted. @@ -1940,12 +2020,16 @@ def extract_table( (Optional) Extra configuration options for the job. retry (google.api_core.retry.Retry): (Optional) How to retry the RPC. - :type source: :class:`google.cloud.bigquery.table.TableReference` - :param source: table to be extracted. - + Args: + source (google.cloud.bigquery.table.TableReference): table to be extracted. Returns: google.cloud.bigquery.job.ExtractJob: A new extract job instance. + + Raises: + TypeError: + If ``job_config`` is not an instance of :class:`~google.cloud.bigquery.job.ExtractJobConfig` + class. """ job_id = _make_job_id(job_id, job_id_prefix) @@ -1961,6 +2045,10 @@ def extract_table( if isinstance(destination_uris, six.string_types): destination_uris = [destination_uris] + if job_config: + _verify_job_config_type( + job_config, google.cloud.bigquery.job.ExtractJobConfig + ) extract_job = job.ExtractJob( job_ref, source, destination_uris, client=self, job_config=job_config ) @@ -1981,9 +2069,9 @@ def query( """Run a SQL query. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#jobconfigurationquery - Arguments: + Args: query (str): SQL query to be executed. Defaults to the standard SQL dialect. Use the ``job_config`` parameter to change dialects. @@ -2010,6 +2098,11 @@ def query( Returns: google.cloud.bigquery.job.QueryJob: A new query job instance. 
+ + Raises: + TypeError: + If ``job_config`` is not an instance of :class:`~google.cloud.bigquery.job.QueryJobConfig` + class. """ job_id = _make_job_id(job_id, job_id_prefix) @@ -2021,6 +2114,9 @@ def query( if self._default_query_job_config: if job_config: + _verify_job_config_type( + job_config, google.cloud.bigquery.job.QueryJobConfig + ) # anything that's not defined on the incoming # that is in the default, # should be filled in with the default @@ -2029,6 +2125,10 @@ def query( self._default_query_job_config ) else: + _verify_job_config_type( + self._default_query_job_config, + google.cloud.bigquery.job.QueryJobConfig, + ) job_config = self._default_query_job_config job_ref = job._JobReference(job_id, project=project, location=location) @@ -2045,27 +2145,22 @@ def insert_rows(self, table, rows, selected_fields=None, **kwargs): Args: table (Union[ \ - :class:`~google.cloud.bigquery.table.Table`, \ - :class:`~google.cloud.bigquery.table.TableReference`, \ + google.cloud.bigquery.table.Table, \ + google.cloud.bigquery.table.TableReference, \ str, \ ]): The destination table for the row data, or a reference to it. - rows (Union[ \ - Sequence[Tuple], \ - Sequence[dict], \ - ]): + rows (Union[Sequence[Tuple], Sequence[dict]]): Row data to be inserted. If a list of tuples is given, each tuple should contain data for each schema field on the current table and in the same order as the schema fields. If a list of dictionaries is given, the keys must include all required fields in the schema. Keys which do not correspond to a field in the schema are ignored. - selected_fields (Sequence[ \ - :class:`~google.cloud.bigquery.schema.SchemaField`, \ - ]): + selected_fields (Sequence[google.cloud.bigquery.schema.SchemaField]): The fields to return. Required if ``table`` is a :class:`~google.cloud.bigquery.table.TableReference`. - kwargs (dict): + kwargs (Dict): Keyword arguments to :meth:`~google.cloud.bigquery.client.Client.insert_rows_json`. 
@@ -2101,6 +2196,55 @@ def insert_rows(self, table, rows, selected_fields=None, **kwargs): return self.insert_rows_json(table, json_rows, **kwargs) + def insert_rows_from_dataframe( + self, table, dataframe, selected_fields=None, chunk_size=500, **kwargs + ): + """Insert rows into a table from a dataframe via the streaming API. + + Args: + table (Union[ \ + google.cloud.bigquery.table.Table, \ + google.cloud.bigquery.table.TableReference, \ + str, \ + ]): + The destination table for the row data, or a reference to it. + dataframe (pandas.DataFrame): + A :class:`~pandas.DataFrame` containing the data to load. + selected_fields (Sequence[google.cloud.bigquery.schema.SchemaField]): + The fields to return. Required if ``table`` is a + :class:`~google.cloud.bigquery.table.TableReference`. + chunk_size (int): + The number of rows to stream in a single chunk. Must be positive. + kwargs (Dict): + Keyword arguments to + :meth:`~google.cloud.bigquery.client.Client.insert_rows_json`. + + Returns: + Sequence[Sequence[Mappings]]: + A list with insert errors for each insert chunk. Each element + is a list containing one mapping per row with insert errors: + the "index" key identifies the row, and the "errors" key + contains a list of the mappings describing one or more problems + with the row. 
+ + Raises: + ValueError: if table's schema is not set + """ + insert_results = [] + + chunk_count = int(math.ceil(len(dataframe) / chunk_size)) + rows_iter = ( + dict(six.moves.zip(dataframe.columns, row)) + for row in dataframe.itertuples(index=False, name=None) + ) + + for _ in range(chunk_count): + rows_chunk = itertools.islice(rows_iter, chunk_size) + result = self.insert_rows(table, rows_chunk, selected_fields, **kwargs) + insert_results.append(result) + + return insert_results + def insert_rows_json( self, table, @@ -2116,33 +2260,36 @@ def insert_rows_json( See https://cloud.google.com/bigquery/docs/reference/rest/v2/tabledata/insertAll - table (Union[ \ - :class:`~google.cloud.bigquery.table.Table` \ - :class:`~google.cloud.bigquery.table.TableReference`, \ - str, \ - ]): - The destination table for the row data, or a reference to it. - json_rows (Sequence[dict]): - Row data to be inserted. Keys must match the table schema fields - and values must be JSON-compatible representations. - row_ids (Sequence[str]): - (Optional) Unique ids, one per row being inserted. If omitted, - unique IDs are created. - skip_invalid_rows (bool): - (Optional) Insert all valid rows of a request, even if invalid - rows exist. The default value is False, which causes the entire - request to fail if any invalid rows exist. - ignore_unknown_values (bool): - (Optional) Accept rows that contain values that do not match the - schema. The unknown values are ignored. Default is False, which - treats unknown values as errors. - template_suffix (str): - (Optional) treat ``name`` as a template table and provide a suffix. - BigQuery will create the table `` + `` based - on the schema of the template table. See - https://cloud.google.com/bigquery/streaming-data-into-bigquery#template-tables - retry (:class:`google.api_core.retry.Retry`): - (Optional) How to retry the RPC. 
+ Args: + table (Union[ \ + google.cloud.bigquery.table.Table \ + google.cloud.bigquery.table.TableReference, \ + str \ + ]): + The destination table for the row data, or a reference to it. + json_rows (Sequence[Dict]): + Row data to be inserted. Keys must match the table schema fields + and values must be JSON-compatible representations. + row_ids (Optional[Sequence[Optional[str]]]): + Unique IDs, one per row being inserted. An ID can also be + ``None``, indicating that an explicit insert ID should **not** + be used for that row. If the argument is omitted altogether, + unique IDs are created automatically. + skip_invalid_rows (Optional[bool]): + Insert all valid rows of a request, even if invalid rows exist. + The default value is ``False``, which causes the entire request + to fail if any invalid rows exist. + ignore_unknown_values (Optional[bool]): + Accept rows that contain values that do not match the schema. + The unknown values are ignored. Default is ``False``, which + treats unknown values as errors. + template_suffix (Optional[str]): + Treat ``name`` as a template table and provide a suffix. + BigQuery will create the table `` + `` + based on the schema of the template table. See + https://cloud.google.com/bigquery/streaming-data-into-bigquery#template-tables + retry (Optional[google.api_core.retry.Retry]): + How to retry the RPC. Returns: Sequence[Mappings]: @@ -2188,10 +2335,10 @@ def insert_rows_json( def list_partitions(self, table, retry=DEFAULT_RETRY): """List the partitions in a table. 
- Arguments: + Args: table (Union[ \ - :class:`~google.cloud.bigquery.table.Table`, \ - :class:`~google.cloud.bigquery.table.TableReference`, \ + google.cloud.bigquery.table.Table, \ + google.cloud.bigquery.table.TableReference, \ str, \ ]): The table or reference from which to get partition info @@ -2240,18 +2387,16 @@ def list_rows( Args: table (Union[ \ - :class:`~google.cloud.bigquery.table.Table`, \ - :class:`~google.cloud.bigquery.table.TableListItem`, \ - :class:`~google.cloud.bigquery.table.TableReference`, \ + google.cloud.bigquery.table.Table, \ + google.cloud.bigquery.table.TableListItem, \ + google.cloud.bigquery.table.TableReference, \ str, \ ]): The table to list, or a reference to it. When the table object does not contain a schema and ``selected_fields`` is not supplied, this method calls ``get_table`` to fetch the table schema. - selected_fields (Sequence[ \ - :class:`~google.cloud.bigquery.schema.SchemaField` \ - ]): + selected_fields (Sequence[google.cloud.bigquery.schema.SchemaField]): The fields to return. If not supplied, data for all columns are downloaded. max_results (int): @@ -2269,7 +2414,7 @@ def list_rows( Optional. The maximum number of rows in each page of results from this request. Non-positive values are ignored. Defaults to a sensible value set by the API. - retry (:class:`google.api_core.retry.Retry`): + retry (google.api_core.retry.Retry): (Optional) How to retry the RPC. Returns: @@ -2369,14 +2514,13 @@ def schema_to_json(self, schema_list, destination): def _item_to_project(iterator, resource): """Convert a JSON project to the native object. - :type iterator: :class:`~google.api_core.page_iterator.Iterator` - :param iterator: The iterator that is currently in use. + Args: + iterator (google.api_core.page_iterator.Iterator): The iterator that is currently in use. - :type resource: dict - :param resource: An item to be converted to a project. + resource (Dict): An item to be converted to a project. 
- :rtype: :class:`.Project` - :returns: The next project in the page. + Returns: + google.cloud.bigquery.client.Project: The next project in the page. """ return Project.from_api_repr(resource) @@ -2387,14 +2531,13 @@ def _item_to_project(iterator, resource): def _item_to_dataset(iterator, resource): """Convert a JSON dataset to the native object. - :type iterator: :class:`~google.api_core.page_iterator.Iterator` - :param iterator: The iterator that is currently in use. + Args: + iterator (google.api_core.page_iterator.Iterator): The iterator that is currently in use. - :type resource: dict - :param resource: An item to be converted to a dataset. + resource (Dict): An item to be converted to a dataset. - :rtype: :class:`.DatasetListItem` - :returns: The next dataset in the page. + Returns: + google.cloud.bigquery.dataset.DatasetListItem: The next dataset in the page. """ return DatasetListItem(resource) @@ -2402,14 +2545,13 @@ def _item_to_dataset(iterator, resource): def _item_to_job(iterator, resource): """Convert a JSON job to the native object. - :type iterator: :class:`~google.api_core.page_iterator.Iterator` - :param iterator: The iterator that is currently in use. + Args: + iterator (google.api_core.page_iterator.Iterator): The iterator that is currently in use. - :type resource: dict - :param resource: An item to be converted to a job. + resource (Dict): An item to be converted to a job. - :rtype: job instance. - :returns: The next job in the page. + Returns: + job instance: The next job in the page. """ return iterator.client.job_from_resource(resource) @@ -2420,8 +2562,7 @@ def _item_to_model(iterator, resource): Args: iterator (google.api_core.page_iterator.Iterator): The iterator that is currently in use. - resource (dict): - An item to be converted to a model. + resource (Dict): An item to be converted to a model. Returns: google.cloud.bigquery.model.Model: The next model in the page. 
@@ -2435,8 +2576,7 @@ def _item_to_routine(iterator, resource): Args: iterator (google.api_core.page_iterator.Iterator): The iterator that is currently in use. - resource (dict): - An item to be converted to a routine. + resource (Dict): An item to be converted to a routine. Returns: google.cloud.bigquery.routine.Routine: The next routine in the page. @@ -2447,14 +2587,13 @@ def _item_to_routine(iterator, resource): def _item_to_table(iterator, resource): """Convert a JSON table to the native object. - :type iterator: :class:`~google.api_core.page_iterator.Iterator` - :param iterator: The iterator that is currently in use. + Args: + iterator (google.api_core.page_iterator.Iterator): The iterator that is currently in use. - :type resource: dict - :param resource: An item to be converted to a table. + resource (Dict): An item to be converted to a table. - :rtype: :class:`~google.cloud.bigquery.table.Table` - :returns: The next table in the page. + Returns: + google.cloud.bigquery.table.Table: The next table in the page. """ return TableListItem(resource) @@ -2462,14 +2601,13 @@ def _item_to_table(iterator, resource): def _make_job_id(job_id, prefix=None): """Construct an ID for a new job. - :type job_id: str or ``NoneType`` - :param job_id: the user-provided job ID + Args: + job_id (Optional[str]): the user-provided job ID. - :type prefix: str or ``NoneType`` - :param prefix: (Optional) the user-provided prefix for a job ID + prefix (Optional[str]): the user-provided prefix for a job ID. - :rtype: str - :returns: A job ID + Returns: + str: A job ID """ if job_id is not None: return job_id @@ -2482,11 +2620,13 @@ def _make_job_id(job_id, prefix=None): def _check_mode(stream): """Check that a stream was opened in read-binary mode. - :type stream: IO[bytes] - :param stream: A bytes IO object open for reading. + Args: + stream (IO[bytes]): A bytes IO object open for reading. 
- :raises: :exc:`ValueError` if the ``stream.mode`` is a valid attribute - and is not among ``rb``, ``r+b`` or ``rb+``. + Raises: + ValueError: + if the ``stream.mode`` is a valid attribute + and is not among ``rb``, ``r+b`` or ``rb+``. """ mode = getattr(stream, "mode", None) @@ -2507,11 +2647,11 @@ def _check_mode(stream): def _get_upload_headers(user_agent): """Get the headers for an upload request. - :type user_agent: str - :param user_agent: The user-agent for requests. + Args: + user_agent (str): The user-agent for requests. - :rtype: dict - :returns: The headers to be used for the request. + Returns: + Dict: The headers to be used for the request. """ return { "Accept": "application/json", diff --git a/bigquery/google/cloud/bigquery/dataset.py b/bigquery/google/cloud/bigquery/dataset.py index 494c219d4f67..754a2fa00d00 100644 --- a/bigquery/google/cloud/bigquery/dataset.py +++ b/bigquery/google/cloud/bigquery/dataset.py @@ -18,21 +18,13 @@ import six import copy -import re import google.cloud._helpers from google.cloud.bigquery import _helpers from google.cloud.bigquery.model import ModelReference from google.cloud.bigquery.routine import RoutineReference from google.cloud.bigquery.table import TableReference - - -_PROJECT_PREFIX_PATTERN = re.compile( - r""" - (?P\S+\:[^.]+)\.(?P[^.]+)$ -""", - re.VERBOSE, -) +from google.cloud.bigquery.encryption_configuration import EncryptionConfiguration def _get_table_reference(self, table_id): @@ -216,7 +208,7 @@ class DatasetReference(object): """DatasetReferences are pointers to datasets. 
See - https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets + https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets#datasetreference Args: project (str): The ID of the project @@ -299,13 +291,7 @@ def from_string(cls, dataset_id, default_project=None): """ output_dataset_id = dataset_id output_project_id = default_project - with_prefix = _PROJECT_PREFIX_PATTERN.match(dataset_id) - if with_prefix is None: - parts = dataset_id.split(".") - else: - project_id = with_prefix.group("project_id") - dataset_id = with_prefix.group("dataset_id") - parts = [project_id, dataset_id] + parts = _helpers._split_id(dataset_id) if len(parts) == 1 and not default_project: raise ValueError( @@ -361,13 +347,10 @@ class Dataset(object): """Datasets are containers for tables. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets + https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets#resource-dataset Args: - dataset_ref (Union[ \ - :class:`~google.cloud.bigquery.dataset.DatasetReference`, \ - str, \ - ]): + dataset_ref (Union[google.cloud.bigquery.dataset.DatasetReference, str]): A pointer to a dataset. If ``dataset_ref`` is a string, it must include both the project ID and the dataset ID, separated by ``.``. @@ -376,8 +359,10 @@ class Dataset(object): _PROPERTY_TO_API_FIELD = { "access_entries": "access", "created": "creationTime", + "default_partition_expiration_ms": "defaultPartitionExpirationMs", "default_table_expiration_ms": "defaultTableExpirationMs", "friendly_name": "friendlyName", + "default_encryption_configuration": "defaultEncryptionConfiguration", } def __init__(self, dataset_ref): @@ -478,6 +463,34 @@ def self_link(self): """ return self._properties.get("selfLink") + @property + def default_partition_expiration_ms(self): + """Optional[int]: The default partition expiration for all + partitioned tables in the dataset, in milliseconds. 
+ + Once this property is set, all newly-created partitioned tables in + the dataset will have a ``time_partitioning.expiration_ms`` property + set to this value, and changing the value will only affect new + tables, not existing ones. The storage in a partition will have an + expiration time of its partition time plus this value. + + Setting this property overrides the use of + ``default_table_expiration_ms`` for partitioned tables: only one of + ``default_table_expiration_ms`` and + ``default_partition_expiration_ms`` will be used for any new + partitioned table. If you provide an explicit + ``time_partitioning.expiration_ms`` when creating or updating a + partitioned table, that value takes precedence over the default + partition expiration time indicated by this property. + """ + return _helpers._int_or_none( + self._properties.get("defaultPartitionExpirationMs") + ) + + @default_partition_expiration_ms.setter + def default_partition_expiration_ms(self, value): + self._properties["defaultPartitionExpirationMs"] = _helpers._str_or_none(value) + @property def default_table_expiration_ms(self): """Union[int, None]: Default expiration time for tables in the dataset @@ -562,6 +575,30 @@ def labels(self, value): raise ValueError("Pass a dict") self._properties["labels"] = value + @property + def default_encryption_configuration(self): + """google.cloud.bigquery.encryption_configuration.EncryptionConfiguration: Custom + encryption configuration for all tables in the dataset. + + Custom encryption configuration (e.g., Cloud KMS keys) or :data:`None` + if using default encryption. + + See `protecting data with Cloud KMS keys + `_ + in the BigQuery documentation.
+ """ + prop = self._properties.get("defaultEncryptionConfiguration") + if prop: + prop = EncryptionConfiguration.from_api_repr(prop) + return prop + + @default_encryption_configuration.setter + def default_encryption_configuration(self, value): + api_repr = value + if value: + api_repr = value.to_api_repr() + self._properties["defaultEncryptionConfiguration"] = api_repr + @classmethod def from_string(cls, full_dataset_id): """Construct a dataset from fully-qualified dataset ID. diff --git a/bigquery/google/cloud/bigquery/dbapi/_helpers.py b/bigquery/google/cloud/bigquery/dbapi/_helpers.py index 6e7f58bd4944..651880feac90 100644 --- a/bigquery/google/cloud/bigquery/dbapi/_helpers.py +++ b/bigquery/google/cloud/bigquery/dbapi/_helpers.py @@ -30,40 +30,25 @@ def scalar_to_query_parameter(value, name=None): """Convert a scalar value into a query parameter. - :type value: any - :param value: A scalar value to convert into a query parameter. + Args: + value (Any): + A scalar value to convert into a query parameter. - :type name: str - :param name: (Optional) Name of the query parameter. + name (str): + (Optional) Name of the query parameter. - :rtype: :class:`~google.cloud.bigquery.ScalarQueryParameter` - :returns: - A query parameter corresponding with the type and value of the plain - Python object. - :raises: :class:`~google.cloud.bigquery.dbapi.exceptions.ProgrammingError` - if the type cannot be determined. + Returns: + google.cloud.bigquery.ScalarQueryParameter: + A query parameter corresponding with the type and value of the plain + Python object. + + Raises: + google.cloud.bigquery.dbapi.exceptions.ProgrammingError: + if the type cannot be determined. 
""" - parameter_type = None + parameter_type = bigquery_scalar_type(value) - if isinstance(value, bool): - parameter_type = "BOOL" - elif isinstance(value, numbers.Integral): - parameter_type = "INT64" - elif isinstance(value, numbers.Real): - parameter_type = "FLOAT64" - elif isinstance(value, decimal.Decimal): - parameter_type = "NUMERIC" - elif isinstance(value, six.text_type): - parameter_type = "STRING" - elif isinstance(value, six.binary_type): - parameter_type = "BYTES" - elif isinstance(value, datetime.datetime): - parameter_type = "DATETIME" if value.tzinfo is None else "TIMESTAMP" - elif isinstance(value, datetime.date): - parameter_type = "DATE" - elif isinstance(value, datetime.time): - parameter_type = "TIME" - else: + if parameter_type is None: raise exceptions.ProgrammingError( "encountered parameter {} with value {} of unexpected type".format( name, value @@ -72,41 +57,107 @@ def scalar_to_query_parameter(value, name=None): return bigquery.ScalarQueryParameter(name, parameter_type, value) +def array_to_query_parameter(value, name=None): + """Convert an array-like value into a query parameter. + + Args: + value (Sequence[Any]): The elements of the array (should not be a + string-like Sequence). + name (Optional[str]): Name of the query parameter. + + Returns: + A query parameter corresponding with the type and value of the plain + Python object. + + Raises: + google.cloud.bigquery.dbapi.exceptions.ProgrammingError: + if the type of array elements cannot be determined. 
+ """ + if not array_like(value): + raise exceptions.ProgrammingError( + "The value of parameter {} must be a sequence that is " + "not string-like.".format(name) + ) + + if not value: + raise exceptions.ProgrammingError( + "Encountered an empty array-like value of parameter {}, cannot " + "determine array elements type.".format(name) + ) + + # Assume that all elements are of the same type, and let the backend handle + # any type incompatibilities among the array elements + array_type = bigquery_scalar_type(value[0]) + if array_type is None: + raise exceptions.ProgrammingError( + "Encountered unexpected first array element of parameter {}, " + "cannot determine array elements type.".format(name) + ) + + return bigquery.ArrayQueryParameter(name, array_type, value) + + def to_query_parameters_list(parameters): """Converts a sequence of parameter values into query parameters. - :type parameters: Sequence[Any] - :param parameters: Sequence of query parameter values. + Args: + parameters (Sequence[Any]): Sequence of query parameter values. - :rtype: List[google.cloud.bigquery.query._AbstractQueryParameter] - :returns: A list of query parameters. + Returns: + List[google.cloud.bigquery.query._AbstractQueryParameter]: + A list of query parameters. """ - return [scalar_to_query_parameter(value) for value in parameters] + result = [] + + for value in parameters: + if isinstance(value, collections_abc.Mapping): + raise NotImplementedError("STRUCT-like parameter values are not supported.") + elif array_like(value): + param = array_to_query_parameter(value) + else: + param = scalar_to_query_parameter(value) + result.append(param) + + return result def to_query_parameters_dict(parameters): """Converts a dictionary of parameter values into query parameters. - :type parameters: Mapping[str, Any] - :param parameters: Dictionary of query parameter values. + Args: + parameters (Mapping[str, Any]): Dictionary of query parameter values. 
- :rtype: List[google.cloud.bigquery.query._AbstractQueryParameter] - :returns: A list of named query parameters. + Returns: + List[google.cloud.bigquery.query._AbstractQueryParameter]: + A list of named query parameters. """ - return [ - scalar_to_query_parameter(value, name=name) - for name, value in six.iteritems(parameters) - ] + result = [] + + for name, value in six.iteritems(parameters): + if isinstance(value, collections_abc.Mapping): + raise NotImplementedError( + "STRUCT-like parameter values are not supported " + "(parameter {}).".format(name) + ) + elif array_like(value): + param = array_to_query_parameter(value, name=name) + else: + param = scalar_to_query_parameter(value, name=name) + result.append(param) + + return result def to_query_parameters(parameters): """Converts DB-API parameter values into query parameters. - :type parameters: Mapping[str, Any] or Sequence[Any] - :param parameters: A dictionary or sequence of query parameter values. + Args: + parameters (Union[Mapping[str, Any], Sequence[Any]]): + A dictionary or sequence of query parameter values. - :rtype: List[google.cloud.bigquery.query._AbstractQueryParameter] - :returns: A list of query parameters. + Returns: + List[google.cloud.bigquery.query._AbstractQueryParameter]: + A list of query parameters. """ if parameters is None: return [] @@ -115,3 +166,55 @@ def to_query_parameters(parameters): return to_query_parameters_dict(parameters) return to_query_parameters_list(parameters) + + +def bigquery_scalar_type(value): + """Return a BigQuery name of the scalar type that matches the given value. + + If the scalar type name could not be determined (e.g. for non-scalar + values), ``None`` is returned. + + Args: + value (Any) + + Returns: + Optional[str]: The BigQuery scalar type name. 
+ """ + if isinstance(value, bool): + return "BOOL" + elif isinstance(value, numbers.Integral): + return "INT64" + elif isinstance(value, numbers.Real): + return "FLOAT64" + elif isinstance(value, decimal.Decimal): + return "NUMERIC" + elif isinstance(value, six.text_type): + return "STRING" + elif isinstance(value, six.binary_type): + return "BYTES" + elif isinstance(value, datetime.datetime): + return "DATETIME" if value.tzinfo is None else "TIMESTAMP" + elif isinstance(value, datetime.date): + return "DATE" + elif isinstance(value, datetime.time): + return "TIME" + + return None + + +def array_like(value): + """Determine if the given value is array-like. + + Examples of array-like values (as interpreted by this function) are + sequences such as ``list`` and ``tuple``, but not strings and other + iterables such as sets. + + Args: + value (Any) + + Returns: + bool: ``True`` if the value is considered array-like, ``False`` otherwise. + """ + return isinstance(value, collections_abc.Sequence) and not isinstance( + value, (six.text_type, six.binary_type, bytearray) + ) diff --git a/bigquery/google/cloud/bigquery/dbapi/connection.py b/bigquery/google/cloud/bigquery/dbapi/connection.py index 0dbc9143b255..ee7d0dc3cc59 100644 --- a/bigquery/google/cloud/bigquery/dbapi/connection.py +++ b/bigquery/google/cloud/bigquery/dbapi/connection.py @@ -21,8 +21,8 @@ class Connection(object): """DB-API Connection to Google BigQuery. - :type client: :class:`~google.cloud.bigquery.Client` - :param client: A client used to connect to BigQuery. + Args: + client (google.cloud.bigquery.Client): A client used to connect to BigQuery. """ def __init__(self, client): @@ -37,8 +37,8 @@ def commit(self): def cursor(self): """Return a new cursor object. - :rtype: :class:`~google.cloud.bigquery.dbapi.Cursor` - :returns: A DB-API cursor that uses this connection. + Returns: + google.cloud.bigquery.dbapi.Cursor: A DB-API cursor that uses this connection. 
""" return cursor.Cursor(self) @@ -46,13 +46,13 @@ def cursor(self): def connect(client=None): """Construct a DB-API connection to Google BigQuery. - :type client: :class:`~google.cloud.bigquery.Client` - :param client: - (Optional) A client used to connect to BigQuery. If not passed, a - client is created using default options inferred from the environment. + Args: + client (google.cloud.bigquery.Client): + (Optional) A client used to connect to BigQuery. If not passed, a + client is created using default options inferred from the environment. - :rtype: :class:`~google.cloud.bigquery.dbapi.Connection` - :returns: A new DB-API connection to BigQuery. + Returns: + google.cloud.bigquery.dbapi.Connection: A new DB-API connection to BigQuery. """ if client is None: client = bigquery.Client() diff --git a/bigquery/google/cloud/bigquery/dbapi/cursor.py b/bigquery/google/cloud/bigquery/dbapi/cursor.py index 1fbd9fb10cc4..a3e6ea5be87e 100644 --- a/bigquery/google/cloud/bigquery/dbapi/cursor.py +++ b/bigquery/google/cloud/bigquery/dbapi/cursor.py @@ -49,8 +49,9 @@ class Cursor(object): """DB-API Cursor to Google BigQuery. - :type connection: :class:`~google.cloud.bigquery.dbapi.Connection` - :param connection: A DB-API connection to Google BigQuery. + Args: + connection (google.cloud.bigquery.dbapi.Connection): + A DB-API connection to Google BigQuery. """ def __init__(self, connection): @@ -61,8 +62,10 @@ def __init__(self, connection): # cannot be determined by the interface. self.rowcount = -1 # Per PEP 249: The arraysize attribute defaults to 1, meaning to fetch - # a single row at a time. - self.arraysize = 1 + # a single row at a time. However, we deviate from that, and set the + # default to None, allowing the backend to automatically determine the + # most appropriate size. + self.arraysize = None self._query_data = None self._query_job = None @@ -72,8 +75,9 @@ def close(self): def _set_description(self, schema): """Set description from schema. 
- :type schema: Sequence[google.cloud.bigquery.schema.SchemaField] - :param schema: A description of fields in the schema. + Args: + schema (Sequence[google.cloud.bigquery.schema.SchemaField]): + A description of fields in the schema. """ if schema is None: self.description = None @@ -101,9 +105,9 @@ def _set_rowcount(self, query_results): query, but if it was a DML statement, it sets rowcount to the number of modified rows. - :type query_results: - :class:`~google.cloud.bigquery.query._QueryResults` - :param query_results: results of a query + Args: + query_results (google.cloud.bigquery.query._QueryResults): + Results of a query. """ total_rows = 0 num_dml_affected_rows = query_results.num_dml_affected_rows @@ -114,7 +118,7 @@ def _set_rowcount(self, query_results): total_rows = num_dml_affected_rows self.rowcount = total_rows - def execute(self, operation, parameters=None, job_id=None): + def execute(self, operation, parameters=None, job_id=None, job_config=None): """Prepare and execute a database operation. .. note:: @@ -136,16 +140,18 @@ def execute(self, operation, parameters=None, job_id=None): yet supported. See: https://github.com/GoogleCloudPlatform/google-cloud-python/issues/3524 - :type operation: str - :param operation: A Google BigQuery query string. + Args: + operation (str): A Google BigQuery query string. - :type parameters: Mapping[str, Any] or Sequence[Any] - :param parameters: - (Optional) dictionary or sequence of parameter values. + parameters (Union[Mapping[str, Any], Sequence[Any]]): + (Optional) dictionary or sequence of parameter values. - :type job_id: str - :param job_id: (Optional) The job_id to use. If not set, a job ID - is generated at random. + job_id (str): + (Optional) The job_id to use. If not set, a job ID + is generated at random. + + job_config (google.cloud.bigquery.job.QueryJobConfig): + (Optional) Extra configuration options for the query job. 
""" self._query_data = None self._query_job = None @@ -158,9 +164,8 @@ def execute(self, operation, parameters=None, job_id=None): formatted_operation = _format_operation(operation, parameters=parameters) query_parameters = _helpers.to_query_parameters(parameters) - config = job.QueryJobConfig() + config = job_config or job.QueryJobConfig(use_legacy_sql=False) config.query_parameters = query_parameters - config.use_legacy_sql = False self._query_job = client.query( formatted_operation, job_config=config, job_id=job_id ) @@ -178,11 +183,11 @@ def execute(self, operation, parameters=None, job_id=None): def executemany(self, operation, seq_of_parameters): """Prepare and execute a database operation multiple times. - :type operation: str - :param operation: A Google BigQuery query string. + Args: + operation (str): A Google BigQuery query string. - :type seq_of_parameters: Sequence[Mapping[str, Any] or Sequence[Any]] - :param parameters: Sequence of many sets of parameter values. + seq_of_parameters (Union[Sequence[Mapping[str, Any], Sequence[Any]]]): + Sequence of many sets of parameter values. """ for parameters in seq_of_parameters: self.execute(operation, parameters) @@ -217,12 +222,13 @@ def _try_fetch(self, size=None): def fetchone(self): """Fetch a single row from the results of the last ``execute*()`` call. - :rtype: tuple - :returns: - A tuple representing a row or ``None`` if no more data is - available. - :raises: :class:`~google.cloud.bigquery.dbapi.InterfaceError` - if called before ``execute()``. + Returns: + Tuple: + A tuple representing a row or ``None`` if no more data is + available. + + Raises: + google.cloud.bigquery.dbapi.InterfaceError: if called before ``execute()``. """ self._try_fetch() try: @@ -238,18 +244,23 @@ def fetchmany(self, size=None): Set the ``arraysize`` attribute before calling ``execute()`` to set the batch size. - :type size: int - :param size: - (Optional) Maximum number of rows to return. 
Defaults to the - ``arraysize`` property value. + Args: + size (int): + (Optional) Maximum number of rows to return. Defaults to the + ``arraysize`` property value. If ``arraysize`` is not set, it + defaults to ``1``. - :rtype: List[tuple] - :returns: A list of rows. - :raises: :class:`~google.cloud.bigquery.dbapi.InterfaceError` - if called before ``execute()``. + Returns: + List[Tuple]: A list of rows. + + Raises: + google.cloud.bigquery.dbapi.InterfaceError: if called before ``execute()``. """ if size is None: - size = self.arraysize + # Since self.arraysize can be None (a deviation from PEP 249), + # use an actual PEP 249 default of 1 in such case (*some* number + # is needed here). + size = self.arraysize if self.arraysize else 1 self._try_fetch(size=size) rows = [] @@ -264,10 +275,11 @@ def fetchmany(self, size=None): def fetchall(self): """Fetch all remaining results from the last ``execute*()`` call. - :rtype: List[tuple] - :returns: A list of all the rows in the results. - :raises: :class:`~google.cloud.bigquery.dbapi.InterfaceError` - if called before ``execute()``. + Returns: + List[Tuple]: A list of all the rows in the results. + + Raises: + google.cloud.bigquery.dbapi.InterfaceError: if called before ``execute()``. """ self._try_fetch() return list(self._query_data) @@ -285,17 +297,18 @@ def _format_operation_list(operation, parameters): The input operation will be a query like ``SELECT %s`` and the output will be a query like ``SELECT ?``. - :type operation: str - :param operation: A Google BigQuery query string. + Args: + operation (str): A Google BigQuery query string. - :type parameters: Sequence[Any] - :param parameters: Sequence of parameter values. + parameters (Sequence[Any]): Sequence of parameter values. - :rtype: str - :returns: A formatted query string. - :raises: :class:`~google.cloud.bigquery.dbapi.ProgrammingError` - if a parameter used in the operation is not found in the - ``parameters`` argument. 
+ Returns: + str: A formatted query string. + + Raises: + google.cloud.bigquery.dbapi.ProgrammingError: + if a parameter used in the operation is not found in the + ``parameters`` argument. """ formatted_params = ["?" for _ in parameters] @@ -311,17 +324,18 @@ def _format_operation_dict(operation, parameters): The input operation will be a query like ``SELECT %(namedparam)s`` and the output will be a query like ``SELECT @namedparam``. - :type operation: str - :param operation: A Google BigQuery query string. + Args: + operation (str): A Google BigQuery query string. + + parameters (Mapping[str, Any]): Dictionary of parameter values. - :type parameters: Mapping[str, Any] - :param parameters: Dictionary of parameter values. + Returns: + str: A formatted query string. - :rtype: str - :returns: A formatted query string. - :raises: :class:`~google.cloud.bigquery.dbapi.ProgrammingError` - if a parameter used in the operation is not found in the - ``parameters`` argument. + Raises: + google.cloud.bigquery.dbapi.ProgrammingError: + if a parameter used in the operation is not found in the + ``parameters`` argument. """ formatted_params = {} for name in parameters: @@ -337,17 +351,19 @@ def _format_operation_dict(operation, parameters): def _format_operation(operation, parameters=None): """Formats parameters in operation in way BigQuery expects. - :type: str - :param operation: A Google BigQuery query string. + Args: + operation (str): A Google BigQuery query string. + + parameters (Union[Mapping[str, Any], Sequence[Any]]): + Optional parameter values. - :type: Mapping[str, Any] or Sequence[Any] - :param parameters: Optional parameter values. + Returns: + str: A formatted query string. - :rtype: str - :returns: A formatted query string. - :raises: :class:`~google.cloud.bigquery.dbapi.ProgrammingError` - if a parameter used in the operation is not found in the - ``parameters`` argument. 
+ Raises: + google.cloud.bigquery.dbapi.ProgrammingError: + if a parameter used in the operation is not found in the + ``parameters`` argument. """ if parameters is None: return operation diff --git a/bigquery/google/cloud/bigquery/dbapi/types.py b/bigquery/google/cloud/bigquery/dbapi/types.py index 3c8c454a011a..14917820cd38 100644 --- a/bigquery/google/cloud/bigquery/dbapi/types.py +++ b/bigquery/google/cloud/bigquery/dbapi/types.py @@ -33,11 +33,11 @@ def Binary(string): """Contruct a DB-API binary value. - :type string: str - :param string: A string to encode as a binary value. + Args: + string (str): A string to encode as a binary value. - :rtype: bytes - :returns: The UTF-8 encoded bytes representing the string. + Returns: + bytes: The UTF-8 encoded bytes representing the string. """ return string.encode("utf-8") @@ -45,16 +45,15 @@ def Binary(string): def TimeFromTicks(ticks, tz=None): """Construct a DB-API time value from the given ticks value. - :type ticks: float - :param ticks: - a number of seconds since the epoch; see the documentation of the - standard Python time module for details. + Args: + ticks (float): + a number of seconds since the epoch; see the documentation of the + standard Python time module for details. - :type tz: :class:`datetime.tzinfo` - :param tz: (Optional) time zone to use for conversion + tz (datetime.tzinfo): (Optional) time zone to use for conversion - :rtype: :class:`datetime.time` - :returns: time represented by ticks. + Returns: + datetime.time: time represented by ticks. 
""" dt = datetime.datetime.fromtimestamp(ticks, tz=tz) return dt.timetz() diff --git a/bigquery/google/cloud/bigquery/encryption_configuration.py b/bigquery/google/cloud/bigquery/encryption_configuration.py new file mode 100644 index 000000000000..ba04ae2c45a7 --- /dev/null +++ b/bigquery/google/cloud/bigquery/encryption_configuration.py @@ -0,0 +1,84 @@ +# Copyright 2015 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Define class for the custom encryption configuration.""" + +import copy + + +class EncryptionConfiguration(object): + """Custom encryption configuration (e.g., Cloud KMS keys). + + Args: + kms_key_name (str): resource ID of Cloud KMS key used for encryption + """ + + def __init__(self, kms_key_name=None): + self._properties = {} + if kms_key_name is not None: + self._properties["kmsKeyName"] = kms_key_name + + @property + def kms_key_name(self): + """str: Resource ID of Cloud KMS key + + Resource ID of Cloud KMS key or :data:`None` if using default + encryption. + """ + return self._properties.get("kmsKeyName") + + @kms_key_name.setter + def kms_key_name(self, value): + self._properties["kmsKeyName"] = value + + @classmethod + def from_api_repr(cls, resource): + """Construct an encryption configuration from its API representation + + Args: + resource (Dict[str, object]): + An encryption configuration representation as returned from + the API. 
+ + Returns: + google.cloud.bigquery.table.EncryptionConfiguration: + An encryption configuration parsed from ``resource``. + """ + config = cls() + config._properties = copy.deepcopy(resource) + return config + + def to_api_repr(self): + """Construct the API resource representation of this encryption + configuration. + + Returns: + Dict[str, object]: + Encryption configuration as represented as an API resource + """ + return copy.deepcopy(self._properties) + + def __eq__(self, other): + if not isinstance(other, EncryptionConfiguration): + return NotImplemented + return self.kms_key_name == other.kms_key_name + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash(self.kms_key_name) + + def __repr__(self): + return "EncryptionConfiguration({})".format(self.kms_key_name) diff --git a/bigquery/google/cloud/bigquery/external_config.py b/bigquery/google/cloud/bigquery/external_config.py index 048c2178a654..c637d37d185c 100644 --- a/bigquery/google/cloud/bigquery/external_config.py +++ b/bigquery/google/cloud/bigquery/external_config.py @@ -26,6 +26,7 @@ from google.cloud.bigquery._helpers import _to_bytes from google.cloud.bigquery._helpers import _bytes_to_json from google.cloud.bigquery._helpers import _int_or_none +from google.cloud.bigquery._helpers import _str_or_none from google.cloud.bigquery.schema import SchemaField @@ -175,8 +176,7 @@ def from_api_repr(cls, resource): API. Returns: - :class:`~.external_config.BigtableColumn`: - Configuration parsed from ``resource``. + external_config.BigtableColumn: Configuration parsed from ``resource``. """ config = cls() config._properties = copy.deepcopy(resource) @@ -248,7 +248,7 @@ def type_(self, value): @property def columns(self): - """List[:class:`~.external_config.BigtableColumn`]: Lists of columns + """List[BigtableColumn]: Lists of columns that should be exposed as individual fields. See @@ -368,8 +368,7 @@ def from_api_repr(cls, resource): API. 
Returns: - :class:`~.external_config.BigtableOptions`: - Configuration parsed from ``resource``. + BigtableOptions: Configuration parsed from ``resource``. """ config = cls() config._properties = copy.deepcopy(resource) @@ -475,8 +474,7 @@ def to_api_repr(self): """Build an API representation of this object. Returns: - Dict[str, Any]: - A dictionary in the format used by the BigQuery API. + Dict[str, Any]: A dictionary in the format used by the BigQuery API. """ return copy.deepcopy(self._properties) @@ -492,8 +490,7 @@ def from_api_repr(cls, resource): API. Returns: - :class:`~.external_config.CSVOptions`: - Configuration parsed from ``resource``. + CSVOptions: Configuration parsed from ``resource``. """ config = cls() config._properties = copy.deepcopy(resource) @@ -524,12 +521,24 @@ def skip_leading_rows(self): def skip_leading_rows(self, value): self._properties["skipLeadingRows"] = str(value) + @property + def range(self): + """str: The range of a sheet that BigQuery will query from. + + See + https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#GoogleSheetsOptions + """ + return _str_or_none(self._properties.get("range")) + + @range.setter + def range(self, value): + self._properties["range"] = value + def to_api_repr(self): """Build an API representation of this object. Returns: - Dict[str, Any]: - A dictionary in the format used by the BigQuery API. + Dict[str, Any]: A dictionary in the format used by the BigQuery API. """ return copy.deepcopy(self._properties) @@ -545,8 +554,7 @@ def from_api_repr(cls, resource): API. Returns: - :class:`~.external_config.GoogleSheetsOptions`: - Configuration parsed from ``resource``. + GoogleSheetsOptions: Configuration parsed from ``resource``. """ config = cls() config._properties = copy.deepcopy(resource) @@ -560,7 +568,7 @@ class ExternalConfig(object): """Description of an external data source. 
Args: - source_format (:class:`~.external_config.ExternalSourceFormat`): + source_format (ExternalSourceFormat): See :attr:`source_format`. """ @@ -705,8 +713,7 @@ def from_api_repr(cls, resource): API. Returns: - :class:`~.external_config.ExternalConfig`: - Configuration parsed from ``resource``. + ExternalConfig: Configuration parsed from ``resource``. """ config = cls(resource["sourceFormat"]) for optcls in _OPTION_CLASSES: diff --git a/bigquery/google/cloud/bigquery/job.py b/bigquery/google/cloud/bigquery/job.py index 155474536d14..a8d797f4bef5 100644 --- a/bigquery/google/cloud/bigquery/job.py +++ b/bigquery/google/cloud/bigquery/job.py @@ -27,7 +27,9 @@ from google.cloud.bigquery.dataset import Dataset from google.cloud.bigquery.dataset import DatasetListItem from google.cloud.bigquery.dataset import DatasetReference +from google.cloud.bigquery.encryption_configuration import EncryptionConfiguration from google.cloud.bigquery.external_config import ExternalConfig +from google.cloud.bigquery import _helpers from google.cloud.bigquery.query import _query_param_from_api_repr from google.cloud.bigquery.query import ArrayQueryParameter from google.cloud.bigquery.query import ScalarQueryParameter @@ -36,13 +38,13 @@ from google.cloud.bigquery.retry import DEFAULT_RETRY from google.cloud.bigquery.routine import RoutineReference from google.cloud.bigquery.schema import SchemaField +from google.cloud.bigquery.schema import _to_schema_fields from google.cloud.bigquery.table import _EmptyRowIterator -from google.cloud.bigquery.table import EncryptionConfiguration +from google.cloud.bigquery.table import RangePartitioning from google.cloud.bigquery.table import _table_arg_to_table_ref from google.cloud.bigquery.table import TableReference from google.cloud.bigquery.table import Table from google.cloud.bigquery.table import TimePartitioning -from google.cloud.bigquery import _helpers _DONE_STATE = "DONE" _STOPPED_REASON = "stopped" @@ -80,11 +82,11 @@ def 
_error_result_to_exception(error_result): .. _troubleshooting errors: https://cloud.google.com/bigquery\ /troubleshooting-errors - :type error_result: Mapping[str, str] - :param error_result: The error result from BigQuery. + Args: + error_result (Mapping[str, str]): The error result from BigQuery. - :rtype google.cloud.exceptions.GoogleCloudError: - :returns: The mapped exception. + Returns: + google.cloud.exceptions.GoogleCloudError: The mapped exception. """ reason = error_result.get("reason") status_code = _ERROR_REASON_TO_EXCEPTION.get( @@ -332,12 +334,46 @@ def job_id(self): """str: ID of the job.""" return _helpers._get_sub_prop(self._properties, ["jobReference", "jobId"]) + @property + def parent_job_id(self): + """Return the ID of the parent job. + + See: + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobStatistics.FIELDS.parent_job_id + + Returns: + Optional[str]: parent job id. + """ + return _helpers._get_sub_prop(self._properties, ["statistics", "parentJobId"]) + + @property + def script_statistics(self): + resource = _helpers._get_sub_prop( + self._properties, ["statistics", "scriptStatistics"] + ) + if resource is None: + return None + return ScriptStatistics(resource) + + @property + def num_child_jobs(self): + """The number of child jobs executed. + + See: + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobStatistics.FIELDS.num_child_jobs + + Returns: + int + """ + count = _helpers._get_sub_prop(self._properties, ["statistics", "numChildJobs"]) + return int(count) if count is not None else 0 + @property def project(self): """Project bound to the job. - :rtype: str - :returns: the project (derived from the client). + Returns: + str: the project (derived from the client). """ return _helpers._get_sub_prop(self._properties, ["jobReference", "projectId"]) @@ -349,13 +385,14 @@ def location(self): def _require_client(self, client): """Check client or verify over-ride. 
- :type client: :class:`~google.cloud.bigquery.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current dataset. + Args: + client (Optional[google.cloud.bigquery.client.Client]): + the client to use. If not passed, falls back to the + ``client`` stored on the current dataset. - :rtype: :class:`google.cloud.bigquery.client.Client` - :returns: The client passed in or the currently bound client. + Returns: + google.cloud.bigquery.client.Client: + The client passed in or the currently bound client. """ if client is None: client = self._client @@ -363,10 +400,10 @@ def _require_client(self, client): @property def job_type(self): - """Type of job + """Type of job. - :rtype: str - :returns: one of 'load', 'copy', 'extract', 'query' + Returns: + str: one of 'load', 'copy', 'extract', 'query'. """ return self._JOB_TYPE @@ -374,8 +411,8 @@ def job_type(self): def path(self): """URL path for the job's APIs. - :rtype: str - :returns: the path based on project and job ID. + Returns: + str: the path based on project and job ID. """ return "/projects/%s/jobs/%s" % (self.project, self.job_id) @@ -388,8 +425,8 @@ def labels(self): def etag(self): """ETag for the job resource. - :rtype: str, or ``NoneType`` - :returns: the ETag (None until set from the server). + Returns: + Optional[str]: the ETag (None until set from the server). """ return self._properties.get("etag") @@ -397,8 +434,8 @@ def etag(self): def self_link(self): """URL for the job resource. - :rtype: str, or ``NoneType`` - :returns: the URL (None until set from the server). + Returns: + Optional[str]: the URL (None until set from the server). """ return self._properties.get("selfLink") @@ -406,8 +443,8 @@ def self_link(self): def user_email(self): """E-mail address of user who submitted the job. - :rtype: str, or ``NoneType`` - :returns: the URL (None until set from the server). 
+ Returns: + Optional[str]: the URL (None until set from the server). """ return self._properties.get("user_email") @@ -415,8 +452,9 @@ def user_email(self): def created(self): """Datetime at which the job was created. - :rtype: ``datetime.datetime``, or ``NoneType`` - :returns: the creation time (None until set from the server). + Returns: + Optional[datetime.datetime]: + the creation time (None until set from the server). """ statistics = self._properties.get("statistics") if statistics is not None: @@ -428,8 +466,9 @@ def created(self): def started(self): """Datetime at which the job was started. - :rtype: ``datetime.datetime``, or ``NoneType`` - :returns: the start time (None until set from the server). + Returns: + Optional[datetime.datetime]: + the start time (None until set from the server). """ statistics = self._properties.get("statistics") if statistics is not None: @@ -441,8 +480,9 @@ def started(self): def ended(self): """Datetime at which the job finished. - :rtype: ``datetime.datetime``, or ``NoneType`` - :returns: the end time (None until set from the server). + Returns: + Optional[datetime.datetime]: + the end time (None until set from the server). """ statistics = self._properties.get("statistics") if statistics is not None: @@ -459,8 +499,8 @@ def _job_statistics(self): def error_result(self): """Error information about the job as a whole. - :rtype: mapping, or ``NoneType`` - :returns: the error information (None until set from the server). + Returns: + Optional[Mapping]: the error information (None until set from the server). """ status = self._properties.get("status") if status is not None: @@ -470,8 +510,9 @@ def error_result(self): def errors(self): """Information about individual errors generated by the job. - :rtype: list of mappings, or ``NoneType`` - :returns: the error information (None until set from the server). + Returns: + Optional[List[Mapping]]: + the error information (None until set from the server). 
""" status = self._properties.get("status") if status is not None: @@ -481,8 +522,9 @@ def errors(self): def state(self): """Status of the job. - :rtype: str, or ``NoneType`` - :returns: the state (None until set from the server). + Returns: + Optional[str]: + the state (None until set from the server). """ status = self._properties.get("status") if status is not None: @@ -499,8 +541,8 @@ def _copy_configuration_properties(self, configuration): def _set_properties(self, api_response): """Update properties from resource in body of ``api_response`` - :type api_response: dict - :param api_response: response returned from an API call + Args: + api_response (Dict): response returned from an API call. """ cleaned = api_response.copy() self._scrub_local_properties(cleaned) @@ -524,14 +566,18 @@ def _set_properties(self, api_response): def _get_resource_config(cls, resource): """Helper for :meth:`from_api_repr` - :type resource: dict - :param resource: resource for the job + Args: + resource (Dict): resource for the job. + + Returns: + (str, Dict): + tuple (string, dict), where the first element is the + job ID and the second contains job-specific configuration. - :rtype: dict - :returns: tuple (string, dict), where the first element is the - job ID and the second contains job-specific configuration. - :raises: :class:`KeyError` if the resource has no identifier, or - is missing the appropriate configuration. + Raises: + KeyError: + If the resource has no identifier, or + is missing the appropriate configuration. """ if "jobReference" not in resource or "jobId" not in resource["jobReference"]: raise KeyError( @@ -591,16 +637,15 @@ def exists(self, client=None, retry=DEFAULT_RETRY): See https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/get - :type client: :class:`~google.cloud.bigquery.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current dataset. 
+ Args: + client (Optional[google.cloud.bigquery.client.Client]): + the client to use. If not passed, falls back to the + ``client`` stored on the current dataset. - :type retry: :class:`google.api_core.retry.Retry` - :param retry: (Optional) How to retry the RPC. + retry (google.api_core.retry.Retry): (Optional) How to retry the RPC. - :rtype: bool - :returns: Boolean indicating existence of the job. + Returns: + bool: Boolean indicating existence of the job. """ client = self._require_client(client) @@ -623,13 +668,12 @@ def reload(self, client=None, retry=DEFAULT_RETRY): See https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/get - :type client: :class:`~google.cloud.bigquery.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current dataset. + Args: + client (Optional[google.cloud.bigquery.client.Client]): + the client to use. If not passed, falls back to the + ``client`` stored on the current dataset. - :type retry: :class:`google.api_core.retry.Retry` - :param retry: (Optional) How to retry the RPC. + retry (google.api_core.retry.Retry): (Optional) How to retry the RPC. """ client = self._require_client(client) @@ -648,13 +692,13 @@ def cancel(self, client=None): See https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/cancel - :type client: :class:`~google.cloud.bigquery.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current dataset. + Args: + client (Optional[google.cloud.bigquery.client.Client]): + the client to use. If not passed, falls back to the + ``client`` stored on the current dataset. - :rtype: bool - :returns: Boolean indicating that the cancel request was sent. + Returns: + bool: Boolean indicating that the cancel request was sent. 
""" client = self._require_client(client) @@ -697,11 +741,11 @@ def _set_future_result(self): def done(self, retry=DEFAULT_RETRY): """Refresh the job and checks if it is complete. - :type retry: :class:`google.api_core.retry.Retry` - :param retry: (Optional) How to retry the RPC. + Args: + retry (google.api_core.retry.Retry): (Optional) How to retry the RPC. - :rtype: bool - :returns: True if the job is complete, False otherwise. + Returns: + bool: True if the job is complete, False otherwise. """ # Do not refresh is the state is already done, as the job will not # change once complete. @@ -712,21 +756,21 @@ def done(self, retry=DEFAULT_RETRY): def result(self, timeout=None, retry=DEFAULT_RETRY): """Start the job and wait for it to complete and get the result. - :type timeout: float - :param timeout: - How long (in seconds) to wait for job to complete before raising - a :class:`concurrent.futures.TimeoutError`. + Args: + timeout (float): + How long (in seconds) to wait for job to complete before raising + a :class:`concurrent.futures.TimeoutError`. - :type retry: :class:`google.api_core.retry.Retry` - :param retry: (Optional) How to retry the RPC. + retry (google.api_core.retry.Retry): (Optional) How to retry the RPC. - :rtype: _AsyncJob - :returns: This instance. + Returns: + _AsyncJob: This instance. - :raises: - :class:`~google.cloud.exceptions.GoogleCloudError` if the job - failed or :class:`concurrent.futures.TimeoutError` if the job did - not complete in the given timeout. + Raises: + google.cloud.exceptions.GoogleCloudError: + if the job failed. + concurrent.futures.TimeoutError: + if the job did not complete in the given timeout. """ if self.state is None: self._begin(retry=retry) @@ -740,8 +784,8 @@ def cancelled(self): cancelled in the API. This method is here to satisfy the interface for :class:`google.api_core.future.Future`. 
- :rtype: bool - :returns: False + Returns: + bool: False """ return ( self.error_result is not None @@ -752,7 +796,7 @@ def cancelled(self): class _JobConfig(object): """Abstract base class for job configuration objects. - Arguments: + Args: job_type (str): The key to use for the job configuration. """ @@ -795,10 +839,10 @@ def _get_sub_prop(self, key, default=None): _helpers._get_sub_prop( self._properties, ['query', 'destinationTable']) - Arguments: + Args: key (str): - Key for the value to get in the - ``self._properties[self._job_type]`` dictionary. + Key for the value to get in the + ``self._properties[self._job_type]`` dictionary. default (object): (Optional) Default value to return if the key is not found. Defaults to :data:`None`. @@ -824,10 +868,10 @@ def _set_sub_prop(self, key, value): _helper._set_sub_prop( self._properties, ['query', 'useLegacySql'], False) - Arguments: + Args: key (str): - Key to set in the ``self._properties[self._job_type]`` - dictionary. + Key to set in the ``self._properties[self._job_type]`` + dictionary. value (object): Value to set. """ _helpers._set_sub_prop(self._properties, [self._job_type, key], value) @@ -846,18 +890,18 @@ def _del_sub_prop(self, key): _helper._del_sub_prop( self._properties, ['query', 'useLegacySql']) - Arguments: + Args: key (str): - Key to remove in the ``self._properties[self._job_type]`` - dictionary. + Key to remove in the ``self._properties[self._job_type]`` + dictionary. """ _helpers._del_sub_prop(self._properties, [self._job_type, key]) def to_api_repr(self): """Build an API representation of the job config. - :rtype: dict - :returns: A dictionary in the format used by the BigQuery API. + Returns: + Dict: A dictionary in the format used by the BigQuery API. """ return copy.deepcopy(self._properties) @@ -868,12 +912,12 @@ def _fill_from_default(self, default_job_config): config. The merge is done at the top-level as well as for keys one level below the job type. 
- Arguments: + Args: default_job_config (google.cloud.bigquery.job._JobConfig): The default job config that will be used to fill in self. Returns: - google.cloud.bigquery.job._JobConfig A new (merged) job config. + google.cloud.bigquery.job._JobConfig: A new (merged) job config. """ if self._job_type != default_job_config._job_type: raise TypeError( @@ -899,13 +943,13 @@ def _fill_from_default(self, default_job_config): def from_api_repr(cls, resource): """Factory: construct a job configuration given its API representation - :type resource: dict - :param resource: - An extract job configuration in the same representation as is - returned from the API. + Args: + resource (Dict): + An extract job configuration in the same representation as is + returned from the API. - :rtype: :class:`google.cloud.bigquery.job._JobConfig` - :returns: Configuration parsed from ``resource``. + Returns: + google.cloud.bigquery.job._JobConfig: Configuration parsed from ``resource``. """ config = cls() config._properties = copy.deepcopy(resource) @@ -928,7 +972,7 @@ def allow_jagged_rows(self): """bool: Allow missing trailing optional columns (CSV only). See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.allowJaggedRows + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad.FIELDS.allow_jagged_rows """ return self._get_sub_prop("allowJaggedRows") @@ -941,7 +985,7 @@ def allow_quoted_newlines(self): """bool: Allow quoted data containing newline characters (CSV only). See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.allowQuotedNewlines + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad.FIELDS.allow_quoted_newlines """ return self._get_sub_prop("allowQuotedNewlines") @@ -954,7 +998,7 @@ def autodetect(self): """bool: Automatically infer the schema from a sample of the data. 
See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.autodetect + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad.FIELDS.autodetect """ return self._get_sub_prop("autodetect") @@ -996,7 +1040,7 @@ def create_disposition(self): for creating tables. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.createDisposition + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad.FIELDS.create_disposition """ return self._get_sub_prop("createDisposition") @@ -1006,14 +1050,14 @@ def create_disposition(self, value): @property def destination_encryption_configuration(self): - """google.cloud.bigquery.table.EncryptionConfiguration: Custom + """google.cloud.bigquery.encryption_configuration.EncryptionConfiguration: Custom encryption configuration for the destination table. Custom encryption configuration (e.g., Cloud KMS keys) or :data:`None` if using default encryption. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.destinationEncryptionConfiguration + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad.FIELDS.destination_encryption_configuration """ prop = self._get_sub_prop("destinationEncryptionConfiguration") if prop is not None: @@ -1034,7 +1078,7 @@ def destination_table_description(self): """Union[str, None] name given to destination table. See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.destinationTableProperties.description + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#DestinationTableProperties.FIELDS.description """ prop = self._get_sub_prop("destinationTableProperties") if prop is not None: @@ -1053,7 +1097,7 @@ def destination_table_friendly_name(self): """Union[str, None] name given to destination table. 
See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.destinationTableProperties.friendlyName + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#DestinationTableProperties.FIELDS.friendly_name """ prop = self._get_sub_prop("destinationTableProperties") if prop is not None: @@ -1073,7 +1117,7 @@ def encoding(self): data. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.encoding + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad.FIELDS.encoding """ return self._get_sub_prop("encoding") @@ -1086,7 +1130,7 @@ def field_delimiter(self): """str: The separator for fields in a CSV file. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.fieldDelimiter + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad.FIELDS.field_delimiter """ return self._get_sub_prop("fieldDelimiter") @@ -1099,7 +1143,7 @@ def ignore_unknown_values(self): """bool: Ignore extra values not represented in the table schema. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.ignoreUnknownValues + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad.FIELDS.ignore_unknown_values """ return self._get_sub_prop("ignoreUnknownValues") @@ -1112,7 +1156,7 @@ def max_bad_records(self): """int: Number of invalid rows to ignore. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.maxBadRecords + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad.FIELDS.max_bad_records """ return _helpers._int_or_none(self._get_sub_prop("maxBadRecords")) @@ -1125,7 +1169,7 @@ def null_marker(self): """str: Represents a null value (CSV only). 
See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.nullMarker + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad.FIELDS.null_marker """ return self._get_sub_prop("nullMarker") @@ -1138,7 +1182,7 @@ def quote_character(self): """str: Character used to quote data sections (CSV only). See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.quote + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad.FIELDS.quote """ return self._get_sub_prop("quote") @@ -1146,13 +1190,49 @@ def quote_character(self): def quote_character(self, value): self._set_sub_prop("quote", value) + @property + def range_partitioning(self): + """Optional[google.cloud.bigquery.table.RangePartitioning]: + Configures range-based partitioning for destination table. + + .. note:: + **Beta**. The integer range partitioning feature is in a + pre-release state and might change or have limited support. + + Only specify at most one of + :attr:`~google.cloud.bigquery.job.LoadJobConfig.time_partitioning` or + :attr:`~google.cloud.bigquery.job.LoadJobConfig.range_partitioning`. + + Raises: + ValueError: + If the value is not + :class:`~google.cloud.bigquery.table.RangePartitioning` or + :data:`None`. + """ + resource = self._get_sub_prop("rangePartitioning") + if resource is not None: + return RangePartitioning(_properties=resource) + + @range_partitioning.setter + def range_partitioning(self, value): + resource = value + if isinstance(value, RangePartitioning): + resource = value._properties + elif value is not None: + raise ValueError( + "Expected value to be RangePartitioning or None, got {}.".format(value) + ) + self._set_sub_prop("rangePartitioning", resource) + @property def schema(self): - """List[google.cloud.bigquery.schema.SchemaField]: Schema of the - destination table. 
+ """Sequence[Union[ \ + :class:`~google.cloud.bigquery.schema.SchemaField`, \ + Mapping[str, Any] \ + ]]: Schema of the destination table. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.schema + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad.FIELDS.schema """ schema = _helpers._get_sub_prop(self._properties, ["load", "schema", "fields"]) if schema is None: @@ -1165,8 +1245,8 @@ def schema(self, value): self._del_sub_prop("schema") return - if not all(hasattr(field, "to_api_repr") for field in value): - raise ValueError("Schema items must be fields") + value = _to_schema_fields(value) + _helpers._set_sub_prop( self._properties, ["load", "schema", "fields"], @@ -1190,7 +1270,7 @@ def skip_leading_rows(self): """int: Number of rows to skip when reading data (CSV only). See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.skipLeadingRows + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad.FIELDS.skip_leading_rows """ return _helpers._int_or_none(self._get_sub_prop("skipLeadingRows")) @@ -1203,7 +1283,7 @@ def source_format(self): """google.cloud.bigquery.job.SourceFormat: File format of the data. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.sourceFormat + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad.FIELDS.source_format """ return self._get_sub_prop("sourceFormat") @@ -1215,6 +1295,10 @@ def source_format(self, value): def time_partitioning(self): """google.cloud.bigquery.table.TimePartitioning: Specifies time-based partitioning for the destination table. + + Only specify at most one of + :attr:`~google.cloud.bigquery.job.LoadJobConfig.time_partitioning` or + :attr:`~google.cloud.bigquery.job.LoadJobConfig.range_partitioning`. 
""" prop = self._get_sub_prop("timePartitioning") if prop is not None: @@ -1248,7 +1332,7 @@ def write_disposition(self): the destination table already exists. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.writeDisposition + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad.FIELDS.write_disposition """ return self._get_sub_prop("writeDisposition") @@ -1262,21 +1346,19 @@ class LoadJob(_AsyncJob): Can load from Google Cloud Storage URIs or from a file. - :type job_id: str - :param job_id: the job's ID + Args: + job_id (str): the job's ID - :type source_uris: sequence of string or ``NoneType`` - :param source_uris: - URIs of one or more data files to be loaded. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.sourceUris - for supported URI formats. Pass None for jobs that load from a file. + source_uris (Optional[Sequence[str]]): + URIs of one or more data files to be loaded. See + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad.FIELDS.source_uris + for supported URI formats. Pass None for jobs that load from a file. - :type destination: :class:`google.cloud.bigquery.table.TableReference` - :param destination: reference to table into which data is to be loaded. + destination (google.cloud.bigquery.table.TableReference): reference to table into which data is to be loaded. - :type client: :class:`google.cloud.bigquery.client.Client` - :param client: A client which holds credentials and project configuration - for the dataset (which requires a project). + client (google.cloud.bigquery.client.Client): + A client which holds credentials and project configuration + for the dataset (which requires a project). 
""" _JOB_TYPE = "load" @@ -1296,7 +1378,7 @@ def destination(self): """google.cloud.bigquery.table.TableReference: table where loaded rows are written See: - https://g.co/cloud/bigquery/docs/reference/rest/v2/jobs#configuration.load.destinationTable + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad.FIELDS.destination_table """ return self._destination @@ -1400,7 +1482,7 @@ def schema(self): @property def destination_encryption_configuration(self): - """google.cloud.bigquery.table.EncryptionConfiguration: Custom + """google.cloud.bigquery.encryption_configuration.EncryptionConfiguration: Custom encryption configuration for the destination table. Custom encryption configuration (e.g., Cloud KMS keys) @@ -1413,22 +1495,29 @@ def destination_encryption_configuration(self): @property def destination_table_description(self): - """Union[str, None] name given to destination table. + """Optional[str] name given to destination table. See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.destinationTableProperties.description + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#DestinationTableProperties.FIELDS.description """ return self._configuration.destination_table_description @property def destination_table_friendly_name(self): - """Union[str, None] name given to destination table. + """Optional[str] name given to destination table. See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.destinationTableProperties.friendlyName + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#DestinationTableProperties.FIELDS.friendly_name """ return self._configuration.destination_table_friendly_name + @property + def range_partitioning(self): + """See + :attr:`google.cloud.bigquery.job.LoadJobConfig.range_partitioning`. 
+ """ + return self._configuration.range_partitioning + @property def time_partitioning(self): """See @@ -1461,9 +1550,11 @@ def schema_update_options(self): def input_file_bytes(self): """Count of bytes loaded from source files. - :rtype: int, or ``NoneType`` - :returns: the count (None until set from the server). - :raises: ValueError for invalid value types. + Returns: + Optional[int]: the count (None until set from the server). + + Raises: + ValueError: for invalid value types. """ return _helpers._int_or_none( _helpers._get_sub_prop( @@ -1475,8 +1566,8 @@ def input_file_bytes(self): def input_files(self): """Count of source files. - :rtype: int, or ``NoneType`` - :returns: the count (None until set from the server). + Returns: + Optional[int]: the count (None until set from the server). """ return _helpers._int_or_none( _helpers._get_sub_prop( @@ -1488,8 +1579,8 @@ def input_files(self): def output_bytes(self): """Count of bytes saved to destination table. - :rtype: int, or ``NoneType`` - :returns: the count (None until set from the server). + Returns: + Optional[int]: the count (None until set from the server). """ return _helpers._int_or_none( _helpers._get_sub_prop( @@ -1501,8 +1592,8 @@ def output_bytes(self): def output_rows(self): """Count of rows saved to destination table. - :rtype: int, or ``NoneType`` - :returns: the count (None until set from the server). + Returns: + Optional[int]: the count (None until set from the server). """ return _helpers._int_or_none( _helpers._get_sub_prop( @@ -1539,15 +1630,15 @@ def from_api_repr(cls, resource, client): This method assumes that the project found in the resource matches the client's project. - :type resource: dict - :param resource: dataset job representation returned from the API + Args: + resource (Dict): dataset job representation returned from the API - :type client: :class:`google.cloud.bigquery.client.Client` - :param client: Client which holds credentials and project - configuration for the dataset. 
+ client (google.cloud.bigquery.client.Client): + Client which holds credentials and project + configuration for the dataset. - :rtype: :class:`google.cloud.bigquery.job.LoadJob` - :returns: Job parsed from ``resource``. + Returns: + google.cloud.bigquery.job.LoadJob: Job parsed from ``resource``. """ config_resource = resource.get("configuration", {}) config = LoadJobConfig.from_api_repr(config_resource) @@ -1580,7 +1671,7 @@ def create_disposition(self): for creating tables. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.copy.createDisposition + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy.FIELDS.create_disposition """ return self._get_sub_prop("createDisposition") @@ -1594,7 +1685,7 @@ def write_disposition(self): the destination table already exists. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.copy.writeDisposition + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy.FIELDS.write_disposition """ return self._get_sub_prop("writeDisposition") @@ -1604,14 +1695,14 @@ def write_disposition(self, value): @property def destination_encryption_configuration(self): - """google.cloud.bigquery.table.EncryptionConfiguration: Custom + """google.cloud.bigquery.encryption_configuration.EncryptionConfiguration: Custom encryption configuration for the destination table. Custom encryption configuration (e.g., Cloud KMS keys) or :data:`None` if using default encryption. 
See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.copy.destinationEncryptionConfiguration + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy.FIELDS.destination_encryption_configuration """ prop = self._get_sub_prop("destinationEncryptionConfiguration") if prop is not None: @@ -1629,22 +1720,19 @@ def destination_encryption_configuration(self, value): class CopyJob(_AsyncJob): """Asynchronous job: copy data into a table from other tables. - :type job_id: str - :param job_id: the job's ID, within the project belonging to ``client``. + Args: + job_id (str): the job's ID, within the project belonging to ``client``. - :type sources: list of :class:`google.cloud.bigquery.table.TableReference` - :param sources: Table from which data is to be loaded. + sources (List[google.cloud.bigquery.table.TableReference]): Table from which data is to be loaded. - :type destination: :class:`google.cloud.bigquery.table.TableReference` - :param destination: Table into which data is to be loaded. + destination (google.cloud.bigquery.table.TableReference): Table into which data is to be loaded. - :type client: :class:`google.cloud.bigquery.client.Client` - :param client: A client which holds credentials and project configuration - for the dataset (which requires a project). + client (google.cloud.bigquery.client.Client): + A client which holds credentials and project configuration + for the dataset (which requires a project). - :type job_config: :class:`~google.cloud.bigquery.job.CopyJobConfig` - :param job_config: - (Optional) Extra configuration options for the copy job. + job_config (google.cloud.bigquery.job.CopyJobConfig): + (Optional) Extra configuration options for the copy job. 
""" _JOB_TYPE = "copy" @@ -1675,7 +1763,7 @@ def write_disposition(self): @property def destination_encryption_configuration(self): - """google.cloud.bigquery.table.EncryptionConfiguration: Custom + """google.cloud.bigquery.encryption_configuration.EncryptionConfiguration: Custom encryption configuration for the destination table. Custom encryption configuration (e.g., Cloud KMS keys) or :data:`None` @@ -1728,15 +1816,15 @@ def from_api_repr(cls, resource, client): This method assumes that the project found in the resource matches the client's project. - :type resource: dict - :param resource: dataset job representation returned from the API + Args: + resource (Dict): dataset job representation returned from the API - :type client: :class:`google.cloud.bigquery.client.Client` - :param client: Client which holds credentials and project - configuration for the dataset. + client (google.cloud.bigquery.client.Client): + Client which holds credentials and project + configuration for the dataset. - :rtype: :class:`google.cloud.bigquery.job.CopyJob` - :returns: Job parsed from ``resource``. + Returns: + google.cloud.bigquery.job.CopyJob: Job parsed from ``resource``. """ job_id, config_resource = cls._get_resource_config(resource) config = CopyJobConfig.from_api_repr(config_resource) @@ -1775,7 +1863,7 @@ def compression(self): exported files. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.extract.compression + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationExtract.FIELDS.compression """ return self._get_sub_prop("compression") @@ -1788,7 +1876,7 @@ def destination_format(self): """google.cloud.bigquery.job.DestinationFormat: Exported file format. 
See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.extract.destinationFormat + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationExtract.FIELDS.destination_format """ return self._get_sub_prop("destinationFormat") @@ -1801,7 +1889,7 @@ def field_delimiter(self): """str: Delimiter to use between fields in the exported data. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.extract.fieldDelimiter + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationExtract.FIELDS.field_delimiter """ return self._get_sub_prop("fieldDelimiter") @@ -1814,7 +1902,7 @@ def print_header(self): """bool: Print a header row in the exported data. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.extract.printHeader + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationExtract.FIELDS.print_header """ return self._get_sub_prop("printHeader") @@ -1826,24 +1914,21 @@ def print_header(self, value): class ExtractJob(_AsyncJob): """Asynchronous job: extract data from a table into Cloud Storage. - :type job_id: str - :param job_id: the job's ID + Args: + job_id (str): the job's ID. - :type source: :class:`google.cloud.bigquery.table.TableReference` - :param source: Table into which data is to be loaded. + source (google.cloud.bigquery.table.TableReference): + Table into which data is to be loaded. - :type destination_uris: list of string - :param destination_uris: - URIs describing where the extracted data will be written in Cloud - Storage, using the format ``gs:///``. + destination_uris (List[str]): + URIs describing where the extracted data will be written in Cloud + Storage, using the format ``gs:///``. - :type client: :class:`google.cloud.bigquery.client.Client` - :param client: - A client which holds credentials and project configuration. 
+ client (google.cloud.bigquery.client.Client): + A client which holds credentials and project configuration. - :type job_config: :class:`~google.cloud.bigquery.job.ExtractJobConfig` - :param job_config: - (Optional) Extra configuration options for the extract job. + job_config (google.cloud.bigquery.job.ExtractJobConfig): + (Optional) Extra configuration options for the extract job. """ _JOB_TYPE = "extract" @@ -1891,14 +1976,15 @@ def destination_uri_file_counts(self): """Return file counts from job statistics, if present. See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#statistics.extract.destinationUriFileCounts + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobStatistics4.FIELDS.destination_uri_file_counts Returns: - a list of integer counts, each representing the number of files - per destination URI or URI pattern specified in the extract - configuration. These values will be in the same order as the URIs - specified in the 'destinationUris' field. Returns None if job is - not yet complete. + List[int]: + A list of integer counts, each representing the number of files + per destination URI or URI pattern specified in the extract + configuration. These values will be in the same order as the URIs + specified in the 'destinationUris' field. Returns None if job is + not yet complete. """ counts = self._job_statistics().get("destinationUriFileCounts") if counts is not None: @@ -1938,15 +2024,15 @@ def from_api_repr(cls, resource, client): This method assumes that the project found in the resource matches the client's project. - :type resource: dict - :param resource: dataset job representation returned from the API + Args: + resource (Dict): dataset job representation returned from the API - :type client: :class:`google.cloud.bigquery.client.Client` - :param client: Client which holds credentials and project - configuration for the dataset. 
+ client (google.cloud.bigquery.client.Client): + Client which holds credentials and project + configuration for the dataset. - :rtype: :class:`google.cloud.bigquery.job.ExtractJob` - :returns: Job parsed from ``resource``. + Returns: + google.cloud.bigquery.job.ExtractJob: Job parsed from ``resource``. """ job_id, config_resource = cls._get_resource_config(resource) config = ExtractJobConfig.from_api_repr(config_resource) @@ -2007,14 +2093,14 @@ def __init__(self, **kwargs): @property def destination_encryption_configuration(self): - """google.cloud.bigquery.table.EncryptionConfiguration: Custom + """google.cloud.bigquery.encryption_configuration.EncryptionConfiguration: Custom encryption configuration for the destination table. Custom encryption configuration (e.g., Cloud KMS keys) or :data:`None` if using default encryption. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query.destinationEncryptionConfiguration + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationQuery.FIELDS.destination_encryption_configuration """ prop = self._get_sub_prop("destinationEncryptionConfiguration") if prop is not None: @@ -2033,7 +2119,7 @@ def allow_large_results(self): """bool: Allow large query results tables (legacy SQL, only) See - https://g.co/cloud/bigquery/docs/reference/rest/v2/jobs#configuration.query.allowLargeResults + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationQuery.FIELDS.allow_large_results """ return self._get_sub_prop("allowLargeResults") @@ -2047,7 +2133,7 @@ def create_disposition(self): for creating tables. See - https://g.co/cloud/bigquery/docs/reference/rest/v2/jobs#configuration.query.createDisposition + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationQuery.FIELDS.create_disposition """ return self._get_sub_prop("createDisposition") @@ -2070,7 +2156,7 @@ def default_dataset(self): separated by ``.``. 
For example: ``your-project.your_dataset``. See - https://g.co/cloud/bigquery/docs/reference/v2/jobs#configuration.query.defaultDataset + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationQuery.FIELDS.default_dataset """ prop = self._get_sub_prop("defaultDataset") if prop is not None: @@ -2107,7 +2193,7 @@ def destination(self): ``your-project.your_dataset.your_table``. See - https://g.co/cloud/bigquery/docs/reference/rest/v2/jobs#configuration.query.destinationTable + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationQuery.FIELDS.destination_table """ prop = self._get_sub_prop("destinationTable") if prop is not None: @@ -2130,7 +2216,7 @@ def dry_run(self): costs. See - https://g.co/cloud/bigquery/docs/reference/v2/jobs#configuration.dryRun + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfiguration.FIELDS.dry_run """ return self._properties.get("dryRun") @@ -2143,7 +2229,7 @@ def flatten_results(self): """bool: Flatten nested/repeated fields in results. (Legacy SQL only) See - https://g.co/cloud/bigquery/docs/reference/rest/v2/jobs#configuration.query.flattenResults + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationQuery.FIELDS.flatten_results """ return self._get_sub_prop("flattenResults") @@ -2157,7 +2243,7 @@ def maximum_billing_tier(self): queries. See - https://g.co/cloud/bigquery/docs/reference/rest/v2/jobs#configuration.query.maximumBillingTier + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationQuery.FIELDS.maximum_billing_tier """ return self._get_sub_prop("maximumBillingTier") @@ -2170,7 +2256,7 @@ def maximum_bytes_billed(self): """int: Maximum bytes to be billed for this job or :data:`None` if not set. 
See - https://g.co/cloud/bigquery/docs/reference/rest/v2/jobs#configuration.query.maximumBytesBilled + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationQuery.FIELDS.maximum_bytes_billed """ return _helpers._int_or_none(self._get_sub_prop("maximumBytesBilled")) @@ -2183,7 +2269,7 @@ def priority(self): """google.cloud.bigquery.job.QueryPriority: Priority of the query. See - https://g.co/cloud/bigquery/docs/reference/rest/v2/jobs#configuration.query.priority + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationQuery.FIELDS.priority """ return self._get_sub_prop("priority") @@ -2199,7 +2285,7 @@ def query_parameters(self): for parameterized query (empty by default) See: - https://g.co/cloud/bigquery/docs/reference/rest/v2/jobs#configuration.query.queryParameters + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationQuery.FIELDS.query_parameters """ prop = self._get_sub_prop("queryParameters", default=[]) return _from_api_repr_query_parameters(prop) @@ -2208,13 +2294,47 @@ def query_parameters(self): def query_parameters(self, values): self._set_sub_prop("queryParameters", _to_api_repr_query_parameters(values)) + @property + def range_partitioning(self): + """Optional[google.cloud.bigquery.table.RangePartitioning]: + Configures range-based partitioning for destination table. + + .. note:: + **Beta**. The integer range partitioning feature is in a + pre-release state and might change or have limited support. + + Only specify at most one of + :attr:`~google.cloud.bigquery.job.LoadJobConfig.time_partitioning` or + :attr:`~google.cloud.bigquery.job.LoadJobConfig.range_partitioning`. + + Raises: + ValueError: + If the value is not + :class:`~google.cloud.bigquery.table.RangePartitioning` or + :data:`None`. 
+ """ + resource = self._get_sub_prop("rangePartitioning") + if resource is not None: + return RangePartitioning(_properties=resource) + + @range_partitioning.setter + def range_partitioning(self, value): + resource = value + if isinstance(value, RangePartitioning): + resource = value._properties + elif value is not None: + raise ValueError( + "Expected value to be RangePartitioning or None, got {}.".format(value) + ) + self._set_sub_prop("rangePartitioning", resource) + @property def udf_resources(self): """List[google.cloud.bigquery.query.UDFResource]: user defined function resources (empty by default) See: - https://g.co/cloud/bigquery/docs/reference/rest/v2/jobs#configuration.query.userDefinedFunctionResources + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationQuery.FIELDS.user_defined_function_resources """ prop = self._get_sub_prop("userDefinedFunctionResources", default=[]) return _from_api_repr_udf_resources(prop) @@ -2230,7 +2350,7 @@ def use_legacy_sql(self): """bool: Use legacy SQL syntax. See - https://g.co/cloud/bigquery/docs/reference/v2/jobs#configuration.query.useLegacySql + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationQuery.FIELDS.use_legacy_sql """ return self._get_sub_prop("useLegacySql") @@ -2243,7 +2363,7 @@ def use_query_cache(self): """bool: Look for the query result in the cache. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query.useQueryCache + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationQuery.FIELDS.use_query_cache """ return self._get_sub_prop("useQueryCache") @@ -2257,7 +2377,7 @@ def write_disposition(self): the destination table already exists. 
See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query.writeDisposition + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationQuery.FIELDS.write_disposition """ return self._get_sub_prop("writeDisposition") @@ -2271,7 +2391,7 @@ def table_definitions(self): Definitions for external tables or :data:`None` if not set. See - https://g.co/cloud/bigquery/docs/reference/rest/v2/jobs#configuration.query.tableDefinitions + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationQuery.FIELDS.external_table_definitions """ prop = self._get_sub_prop("tableDefinitions") if prop is not None: @@ -2284,8 +2404,18 @@ def table_definitions(self, values): @property def time_partitioning(self): - """google.cloud.bigquery.table.TimePartitioning: Specifies time-based - partitioning for the destination table. + """Optional[google.cloud.bigquery.table.TimePartitioning]: Specifies + time-based partitioning for the destination table. + + Only specify at most one of + :attr:`~google.cloud.bigquery.job.LoadJobConfig.time_partitioning` or + :attr:`~google.cloud.bigquery.job.LoadJobConfig.range_partitioning`. + + Raises: + ValueError: + If the value is not + :class:`~google.cloud.bigquery.table.TimePartitioning` or + :data:`None`. """ prop = self._get_sub_prop("timePartitioning") if prop is not None: @@ -2301,7 +2431,7 @@ def time_partitioning(self, value): @property def clustering_fields(self): - """Union[List[str], None]: Fields defining clustering for the table + """Optional[List[str]]: Fields defining clustering for the table (Defaults to :data:`None`). @@ -2318,7 +2448,7 @@ def clustering_fields(self): @clustering_fields.setter def clustering_fields(self, value): - """Union[List[str], None]: Fields defining clustering for the table + """Optional[List[str]]: Fields defining clustering for the table (Defaults to :data:`None`). 
""" @@ -2343,7 +2473,7 @@ def to_api_repr(self): """Build an API representation of the query job config. Returns: - dict: A dictionary in the format used by the BigQuery API. + Dict: A dictionary in the format used by the BigQuery API. """ resource = copy.deepcopy(self._properties) @@ -2362,19 +2492,17 @@ def to_api_repr(self): class QueryJob(_AsyncJob): """Asynchronous job: query tables. - :type job_id: str - :param job_id: the job's ID, within the project belonging to ``client``. + Args: + job_id (str): the job's ID, within the project belonging to ``client``. - :type query: str - :param query: SQL query string + query (str): SQL query string. - :type client: :class:`google.cloud.bigquery.client.Client` - :param client: A client which holds credentials and project configuration - for the dataset (which requires a project). + client (google.cloud.bigquery.client.Client): + A client which holds credentials and project configuration + for the dataset (which requires a project). - :type job_config: :class:`~google.cloud.bigquery.job.QueryJobConfig` - :param job_config: - (Optional) Extra configuration options for the query job. + job_config (google.cloud.bigquery.job.QueryJobConfig): + (Optional) Extra configuration options for the query job. """ _JOB_TYPE = "query" @@ -2426,7 +2554,7 @@ def destination(self): @property def destination_encryption_configuration(self): - """google.cloud.bigquery.table.EncryptionConfiguration: Custom + """google.cloud.bigquery.encryption_configuration.EncryptionConfiguration: Custom encryption configuration for the destination table. Custom encryption configuration (e.g., Cloud KMS keys) or :data:`None` @@ -2463,7 +2591,7 @@ def query(self): """str: The query text used in this query job. 
See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query.query + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationQuery.FIELDS.query """ return _helpers._get_sub_prop( self._properties, ["configuration", "query", "query"] @@ -2518,6 +2646,13 @@ def maximum_bytes_billed(self): """ return self._configuration.maximum_bytes_billed + @property + def range_partitioning(self): + """See + :attr:`google.cloud.bigquery.job.QueryJobConfig.range_partitioning`. + """ + return self._configuration.range_partitioning + @property def table_definitions(self): """See @@ -2566,15 +2701,15 @@ def _copy_configuration_properties(self, configuration): def from_api_repr(cls, resource, client): """Factory: construct a job given its API representation - :type resource: dict - :param resource: dataset job representation returned from the API + Args: + resource (Dict): dataset job representation returned from the API - :type client: :class:`google.cloud.bigquery.client.Client` - :param client: Client which holds credentials and project - configuration for the dataset. + client (google.cloud.bigquery.client.Client): + Client which holds credentials and project + configuration for the dataset. - :rtype: :class:`google.cloud.bigquery.job.QueryJob` - :returns: Job parsed from ``resource``. + Returns: + google.cloud.bigquery.job.QueryJob: Job parsed from ``resource``. """ job_id, config = cls._get_resource_config(resource) query = _helpers._get_sub_prop(config, ["query", "query"]) @@ -2587,11 +2722,12 @@ def query_plan(self): """Return query plan from job statistics, if present. See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#statistics.query.queryPlan + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobStatistics2.FIELDS.query_plan - :rtype: list of :class:`QueryPlanEntry` - :returns: mappings describing the query plan, or an empty list - if the query has not yet completed. 
+ Returns: + List[QueryPlanEntry]: + mappings describing the query plan, or an empty list + if the query has not yet completed. """ plan_entries = self._job_statistics().get("queryPlan", ()) return [QueryPlanEntry.from_api_repr(entry) for entry in plan_entries] @@ -2609,11 +2745,12 @@ def total_bytes_processed(self): """Return total bytes processed from job statistics, if present. See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#statistics.query.totalBytesProcessed + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobStatistics2.FIELDS.total_bytes_processed - :rtype: int or None - :returns: total bytes processed by the job, or None if job is not - yet complete. + Returns: + Optional[int]: + Total bytes processed by the job, or None if job is not + yet complete. """ result = self._job_statistics().get("totalBytesProcessed") if result is not None: @@ -2625,11 +2762,12 @@ def total_bytes_billed(self): """Return total bytes billed from job statistics, if present. See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#statistics.query.totalBytesBilled + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobStatistics2.FIELDS.total_bytes_billed - :rtype: int or None - :returns: total bytes processed by the job, or None if job is not - yet complete. + Returns: + Optional[int]: + Total bytes processed by the job, or None if job is not + yet complete. """ result = self._job_statistics().get("totalBytesBilled") if result is not None: @@ -2641,11 +2779,12 @@ def billing_tier(self): """Return billing tier from job statistics, if present. See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#statistics.query.billingTier + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobStatistics2.FIELDS.billing_tier - :rtype: int or None - :returns: billing tier used by the job, or None if job is not - yet complete. 
+ Returns: + Optional[int]: + Billing tier used by the job, or None if job is not + yet complete. """ return self._job_statistics().get("billingTier") @@ -2654,11 +2793,12 @@ def cache_hit(self): """Return whether or not query results were served from cache. See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#statistics.query.cacheHit + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobStatistics2.FIELDS.cache_hit - :rtype: bool or None - :returns: whether the query results were returned from cache, or None - if job is not yet complete. + Returns: + Optional[bool]: + whether the query results were returned from cache, or None + if job is not yet complete. """ return self._job_statistics().get("cacheHit") @@ -2667,7 +2807,7 @@ def ddl_operation_performed(self): """Optional[str]: Return the DDL operation performed. See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#statistics.query.ddlOperationPerformed + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobStatistics2.FIELDS.ddl_operation_performed """ return self._job_statistics().get("ddlOperationPerformed") @@ -2678,7 +2818,7 @@ def ddl_target_routine(self): for CREATE/DROP FUNCTION/PROCEDURE queries. See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/JobStatistics + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobStatistics2.FIELDS.ddl_target_routine """ prop = self._job_statistics().get("ddlTargetRoutine") if prop is not None: @@ -2691,7 +2831,7 @@ def ddl_target_table(self): for CREATE/DROP TABLE/VIEW queries. See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#statistics.query.ddlTargetTable + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobStatistics2.FIELDS.ddl_target_table """ prop = self._job_statistics().get("ddlTargetTable") if prop is not None: @@ -2703,11 +2843,12 @@ def num_dml_affected_rows(self): """Return the number of DML rows affected by the job. 
See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#statistics.query.numDmlAffectedRows + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobStatistics2.FIELDS.num_dml_affected_rows - :rtype: int or None - :returns: number of DML rows affected by the job, or None if job is not - yet complete. + Returns: + Optional[int]: + number of DML rows affected by the job, or None if job is not + yet complete. """ result = self._job_statistics().get("numDmlAffectedRows") if result is not None: @@ -2724,11 +2865,12 @@ def statement_type(self): """Return statement type from job statistics, if present. See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#statistics.query.statementType + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobStatistics2.FIELDS.statement_type - :rtype: str or None - :returns: type of statement used by the job, or None if job is not - yet complete. + Returns: + Optional[str]: + type of statement used by the job, or None if job is not + yet complete. """ return self._job_statistics().get("statementType") @@ -2737,11 +2879,12 @@ def referenced_tables(self): """Return referenced tables from job statistics, if present. See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#statistics.query.referencedTables + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobStatistics2.FIELDS.referenced_tables - :rtype: list of dict - :returns: mappings describing the query plan, or an empty list - if the query has not yet completed. + Returns: + List[Dict]: + mappings describing the query plan, or an empty list + if the query has not yet completed. """ tables = [] datasets_by_project_name = {} @@ -2766,15 +2909,16 @@ def undeclared_query_parameters(self): """Return undeclared query parameters from job statistics, if present. 
See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#statistics.query.undeclaredQueryParameters + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobStatistics2.FIELDS.undeclared_query_parameters - :rtype: - list of - :class:`~google.cloud.bigquery.ArrayQueryParameter`, - :class:`~google.cloud.bigquery.ScalarQueryParameter`, or - :class:`~google.cloud.bigquery.StructQueryParameter` - :returns: undeclared parameters, or an empty list if the query has - not yet completed. + Returns: + List[Union[ \ + google.cloud.bigquery.query.ArrayQueryParameter, \ + google.cloud.bigquery.query.ScalarQueryParameter, \ + google.cloud.bigquery.query.StructQueryParameter \ + ]]: + Undeclared parameters, or an empty list if the query has + not yet completed. """ parameters = [] undeclared = self._job_statistics().get("undeclaredQueryParameters", ()) @@ -2798,11 +2942,12 @@ def estimated_bytes_processed(self): """Return the estimated number of bytes processed by the query. See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#statistics.query.estimatedBytesProcessed + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobStatistics2.FIELDS.estimated_bytes_processed - :rtype: int or None - :returns: number of DML rows affected by the job, or None if job is not - yet complete. + Returns: + Optional[int]: + number of DML rows affected by the job, or None if job is not + yet complete. """ result = self._job_statistics().get("estimatedBytesProcessed") if result is not None: @@ -2812,8 +2957,8 @@ def estimated_bytes_processed(self): def done(self, retry=DEFAULT_RETRY): """Refresh the job and checks if it is complete. - :rtype: bool - :returns: True if the job is complete, False otherwise. + Returns: + bool: True if the job is complete, False otherwise. 
""" # Since the API to getQueryResults can hang up to the timeout value # (default of 10 seconds), set the timeout parameter to ensure that @@ -2859,8 +3004,8 @@ def _format_for_exception(query, job_id): query (str): The SQL query to format. job_id (str): The ID of the job that ran the query. - Returns: (str) - A formatted query text. + Returns: + str: A formatted query text. """ template = "\n\n(job ID: {job_id})\n\n{header}\n\n{ruler}\n{body}\n{ruler}" @@ -2895,14 +3040,14 @@ def _begin(self, client=None, retry=DEFAULT_RETRY): How to retry the RPC. Raises: - ValueError: - If the job has already begun. + ValueError: If the job has already begun. """ try: super(QueryJob, self)._begin(client=client, retry=retry) except exceptions.GoogleCloudError as exc: exc.message += self._format_for_exception(self.query, self.job_id) + exc.query_job = self raise def result( @@ -2945,6 +3090,7 @@ def result( ) except exceptions.GoogleCloudError as exc: exc.message += self._format_for_exception(self.query, self.job_id) + exc.query_job = self raise # If the query job is complete but there are no query results, this was @@ -2989,9 +3135,7 @@ def to_arrow(self, progress_bar_type=None, bqstorage_client=None): ``'tqdm_gui'`` Use the :func:`tqdm.tqdm_gui` function to display a progress bar as a graphical dialog box. - bqstorage_client ( \ - google.cloud.bigquery_storage_v1beta1.BigQueryStorageClient \ - ): + bqstorage_client (google.cloud.bigquery_storage_v1beta1.BigQueryStorageClient): **Beta Feature** Optional. A BigQuery Storage API client. If supplied, use the faster BigQuery Storage API to fetch rows from BigQuery. This API is a billable API. 
@@ -3024,9 +3168,7 @@ def to_dataframe(self, bqstorage_client=None, dtypes=None, progress_bar_type=Non """Return a pandas DataFrame from a QueryJob Args: - bqstorage_client ( \ - google.cloud.bigquery_storage_v1beta1.BigQueryStorageClient \ - ): + bqstorage_client (google.cloud.bigquery_storage_v1beta1.BigQueryStorageClient): **Alpha Feature** Optional. A BigQuery Storage API client. If supplied, use the faster BigQuery Storage API to fetch rows from BigQuery. This API is a billable API. @@ -3040,9 +3182,7 @@ def to_dataframe(self, bqstorage_client=None, dtypes=None, progress_bar_type=Non **Caution**: There is a known issue reading small anonymous query result tables with the BQ Storage API. Write your query results to a destination table to work around this issue. - dtypes ( \ - Map[str, Union[str, pandas.Series.dtype]] \ - ): + dtypes (Map[str, Union[str, pandas.Series.dtype]]): Optional. A dictionary of column names pandas ``dtype``s. The provided ``dtype`` is used when constructing the series for the column specified. Otherwise, the default pandas behavior @@ -3079,11 +3219,10 @@ def __iter__(self): class QueryPlanEntryStep(object): """Map a single step in a query plan entry. - :type kind: str - :param kind: step type + Args: + kind (str): step type. - :type substeps: - :param substeps: names of substeps + substeps (List): names of substeps. """ def __init__(self, kind, substeps): @@ -3094,11 +3233,11 @@ def __init__(self, kind, substeps): def from_api_repr(cls, resource): """Factory: construct instance from the JSON repr. - :type resource: dict - :param resource: JSON representation of the entry + Args: + resource (Dict): JSON representation of the entry. - :rtype: :class:`QueryPlanEntryStep` - :return: new instance built from the resource + Returns: + QueryPlanEntryStep: new instance built from the resource. 
""" return cls(kind=resource.get("kind"), substeps=resource.get("substeps", ())) @@ -3112,9 +3251,8 @@ class QueryPlanEntry(object): """QueryPlanEntry represents a single stage of a query execution plan. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#ExplainQueryStage for the underlying API representation within query statistics. - """ def __init__(self): @@ -3126,11 +3264,11 @@ def from_api_repr(cls, resource): Args: resource(Dict[str: object]): - ExplainQueryStage representation returned from API + ExplainQueryStage representation returned from API. Returns: google.cloud.bigquery.QueryPlanEntry: - Query plan entry parsed from ``resource`` + Query plan entry parsed from ``resource``. """ entry = cls() entry._properties = resource @@ -3138,17 +3276,17 @@ def from_api_repr(cls, resource): @property def name(self): - """Union[str, None]: Human-readable name of the stage.""" + """Optional[str]: Human-readable name of the stage.""" return self._properties.get("name") @property def entry_id(self): - """Union[str, None]: Unique ID for the stage within the plan.""" + """Optional[str]: Unique ID for the stage within the plan.""" return self._properties.get("id") @property def start(self): - """Union[Datetime, None]: Datetime when the stage started.""" + """Optional[Datetime]: Datetime when the stage started.""" if self._properties.get("startMs") is None: return None return _helpers._datetime_from_microseconds( @@ -3157,7 +3295,7 @@ def start(self): @property def end(self): - """Union[Datetime, None]: Datetime when the stage ended.""" + """Optional[Datetime]: Datetime when the stage ended.""" if self._properties.get("endMs") is None: return None return _helpers._datetime_from_microseconds( @@ -3176,33 +3314,33 @@ def input_stages(self): @property def parallel_inputs(self): - """Union[int, None]: Number of parallel input segments within + """Optional[int]: Number of parallel input segments 
within the stage. """ return _helpers._int_or_none(self._properties.get("parallelInputs")) @property def completed_parallel_inputs(self): - """Union[int, None]: Number of parallel input segments completed.""" + """Optional[int]: Number of parallel input segments completed.""" return _helpers._int_or_none(self._properties.get("completedParallelInputs")) @property def wait_ms_avg(self): - """Union[int, None]: Milliseconds the average worker spent waiting to + """Optional[int]: Milliseconds the average worker spent waiting to be scheduled. """ return _helpers._int_or_none(self._properties.get("waitMsAvg")) @property def wait_ms_max(self): - """Union[int, None]: Milliseconds the slowest worker spent waiting to + """Optional[int]: Milliseconds the slowest worker spent waiting to be scheduled. """ return _helpers._int_or_none(self._properties.get("waitMsMax")) @property def wait_ratio_avg(self): - """Union[float, None]: Ratio of time the average worker spent waiting + """Optional[float]: Ratio of time the average worker spent waiting to be scheduled, relative to the longest time spent by any worker in any stage of the overall plan. """ @@ -3210,7 +3348,7 @@ def wait_ratio_avg(self): @property def wait_ratio_max(self): - """Union[float, None]: Ratio of time the slowest worker spent waiting + """Optional[float]: Ratio of time the slowest worker spent waiting to be scheduled, relative to the longest time spent by any worker in any stage of the overall plan. """ @@ -3218,21 +3356,21 @@ def wait_ratio_max(self): @property def read_ms_avg(self): - """Union[int, None]: Milliseconds the average worker spent reading + """Optional[int]: Milliseconds the average worker spent reading input. """ return _helpers._int_or_none(self._properties.get("readMsAvg")) @property def read_ms_max(self): - """Union[int, None]: Milliseconds the slowest worker spent reading + """Optional[int]: Milliseconds the slowest worker spent reading input. 
""" return _helpers._int_or_none(self._properties.get("readMsMax")) @property def read_ratio_avg(self): - """Union[float, None]: Ratio of time the average worker spent reading + """Optional[float]: Ratio of time the average worker spent reading input, relative to the longest time spent by any worker in any stage of the overall plan. """ @@ -3240,7 +3378,7 @@ def read_ratio_avg(self): @property def read_ratio_max(self): - """Union[float, None]: Ratio of time the slowest worker spent reading + """Optional[float]: Ratio of time the slowest worker spent reading to be scheduled, relative to the longest time spent by any worker in any stage of the overall plan. """ @@ -3248,21 +3386,21 @@ def read_ratio_max(self): @property def compute_ms_avg(self): - """Union[int, None]: Milliseconds the average worker spent on CPU-bound + """Optional[int]: Milliseconds the average worker spent on CPU-bound processing. """ return _helpers._int_or_none(self._properties.get("computeMsAvg")) @property def compute_ms_max(self): - """Union[int, None]: Milliseconds the slowest worker spent on CPU-bound + """Optional[int]: Milliseconds the slowest worker spent on CPU-bound processing. """ return _helpers._int_or_none(self._properties.get("computeMsMax")) @property def compute_ratio_avg(self): - """Union[float, None]: Ratio of time the average worker spent on + """Optional[float]: Ratio of time the average worker spent on CPU-bound processing, relative to the longest time spent by any worker in any stage of the overall plan. """ @@ -3270,7 +3408,7 @@ def compute_ratio_avg(self): @property def compute_ratio_max(self): - """Union[float, None]: Ratio of time the slowest worker spent on + """Optional[float]: Ratio of time the slowest worker spent on CPU-bound processing, relative to the longest time spent by any worker in any stage of the overall plan. 
""" @@ -3278,21 +3416,21 @@ def compute_ratio_max(self): @property def write_ms_avg(self): - """Union[int, None]: Milliseconds the average worker spent writing + """Optional[int]: Milliseconds the average worker spent writing output data. """ return _helpers._int_or_none(self._properties.get("writeMsAvg")) @property def write_ms_max(self): - """Union[int, None]: Milliseconds the slowest worker spent writing + """Optional[int]: Milliseconds the slowest worker spent writing output data. """ return _helpers._int_or_none(self._properties.get("writeMsMax")) @property def write_ratio_avg(self): - """Union[float, None]: Ratio of time the average worker spent writing + """Optional[float]: Ratio of time the average worker spent writing output data, relative to the longest time spent by any worker in any stage of the overall plan. """ @@ -3300,7 +3438,7 @@ def write_ratio_avg(self): @property def write_ratio_max(self): - """Union[float, None]: Ratio of time the slowest worker spent writing + """Optional[float]: Ratio of time the slowest worker spent writing output data, relative to the longest time spent by any worker in any stage of the overall plan. 
""" @@ -3308,29 +3446,29 @@ def write_ratio_max(self): @property def records_read(self): - """Union[int, None]: Number of records read by this stage.""" + """Optional[int]: Number of records read by this stage.""" return _helpers._int_or_none(self._properties.get("recordsRead")) @property def records_written(self): - """Union[int, None]: Number of records written by this stage.""" + """Optional[int]: Number of records written by this stage.""" return _helpers._int_or_none(self._properties.get("recordsWritten")) @property def status(self): - """Union[str, None]: status of this stage.""" + """Optional[str]: status of this stage.""" return self._properties.get("status") @property def shuffle_output_bytes(self): - """Union[int, None]: Number of bytes written by this stage to + """Optional[int]: Number of bytes written by this stage to intermediate shuffle. """ return _helpers._int_or_none(self._properties.get("shuffleOutputBytes")) @property def shuffle_output_bytes_spilled(self): - """Union[int, None]: Number of bytes written by this stage to + """Optional[int]: Number of bytes written by this stage to intermediate shuffle and spilled to disk. """ return _helpers._int_or_none(self._properties.get("shuffleOutputBytesSpilled")) @@ -3351,9 +3489,8 @@ class TimelineEntry(object): point in time. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#querytimelinesample for the underlying API representation within query statistics. - """ def __init__(self): @@ -3365,11 +3502,11 @@ def from_api_repr(cls, resource): Args: resource(Dict[str: object]): - QueryTimelineSample representation returned from API + QueryTimelineSample representation returned from API. Returns: google.cloud.bigquery.TimelineEntry: - Timeline sample parsed from ``resource`` + Timeline sample parsed from ``resource``. 
""" entry = cls() entry._properties = resource @@ -3377,31 +3514,31 @@ def from_api_repr(cls, resource): @property def elapsed_ms(self): - """Union[int, None]: Milliseconds elapsed since start of query + """Optional[int]: Milliseconds elapsed since start of query execution.""" return _helpers._int_or_none(self._properties.get("elapsedMs")) @property def active_units(self): - """Union[int, None]: Current number of input units being processed + """Optional[int]: Current number of input units being processed by workers, reported as largest value since the last sample.""" return _helpers._int_or_none(self._properties.get("activeUnits")) @property def pending_units(self): - """Union[int, None]: Current number of input units remaining for + """Optional[int]: Current number of input units remaining for query stages active at this sample time.""" return _helpers._int_or_none(self._properties.get("pendingUnits")) @property def completed_units(self): - """Union[int, None]: Current number of input units completed by + """Optional[int]: Current number of input units completed by this query.""" return _helpers._int_or_none(self._properties.get("completedUnits")) @property def slot_millis(self): - """Union[int, None]: Cumulative slot-milliseconds consumed by + """Optional[int]: Cumulative slot-milliseconds consumed by this query.""" return _helpers._int_or_none(self._properties.get("totalSlotMs")) @@ -3414,7 +3551,7 @@ def from_api_repr(cls, resource, client): """Construct an UnknownJob from the JSON representation. Args: - resource (dict): JSON representation of a job. + resource (Dict): JSON representation of a job. client (google.cloud.bigquery.client.Client): Client connected to BigQuery API. @@ -3429,3 +3566,81 @@ def from_api_repr(cls, resource, client): resource["jobReference"] = job_ref_properties job._properties = resource return job + + +class ScriptStackFrame(object): + """Stack frame showing the line/column/procedure name where the current + evaluation happened. 
+ + Args: + resource (Map[str, Any]): JSON representation of object. + """ + + def __init__(self, resource): + self._properties = resource + + @property + def procedure_id(self): + """Optional[str]: Name of the active procedure. + + Omitted if in a top-level script. + """ + return self._properties.get("procedureId") + + @property + def text(self): + """str: Text of the current statement/expression.""" + return self._properties.get("text") + + @property + def start_line(self): + """int: One-based start line.""" + return _helpers._int_or_none(self._properties.get("startLine")) + + @property + def start_column(self): + """int: One-based start column.""" + return _helpers._int_or_none(self._properties.get("startColumn")) + + @property + def end_line(self): + """int: One-based end line.""" + return _helpers._int_or_none(self._properties.get("endLine")) + + @property + def end_column(self): + """int: One-based end column.""" + return _helpers._int_or_none(self._properties.get("endColumn")) + + +class ScriptStatistics(object): + """Statistics for a child job of a script. + + Args: + resource (Map[str, Any]): JSON representation of object. + """ + + def __init__(self, resource): + self._properties = resource + + @property + def stack_frames(self): + """List[ScriptStackFrame]: Stack trace where the current evaluation + happened. + + Shows line/column/procedure name of each frame on the stack at the + point where the current evaluation happened. + + The leaf frame is first, the primary script is last. + """ + return [ + ScriptStackFrame(frame) for frame in self._properties.get("stackFrames", []) + ] + + @property + def evaluation_kind(self): + """str: Indicates the type of child job. + + Possible values include ``STATEMENT`` and ``EXPRESSION``. 
+ """ + return self._properties.get("evaluationKind") diff --git a/bigquery/google/cloud/bigquery/magics.py b/bigquery/google/cloud/bigquery/magics.py index b2dae2511ec8..59265ed6b0c5 100644 --- a/bigquery/google/cloud/bigquery/magics.py +++ b/bigquery/google/cloud/bigquery/magics.py @@ -28,7 +28,13 @@ * ```` (optional, line argument): variable to store the query results. The results are not displayed if - this parameter is used. + this parameter is used. If an error occurs during the query execution, + the corresponding ``QueryJob`` instance (if available) is stored in + the variable instead. + * ``--destination_table`` (optional, line argument): + A dataset and table to store the query results. If table does not exists, + it will be created. If table already exists, its data will be overwritten. + Variable should be in a format .. * ``--project `` (optional, line argument): Project to use for running the query. Defaults to the context :attr:`~google.cloud.bigquery.magics.Context.project`. @@ -129,6 +135,7 @@ from __future__ import print_function +import re import ast import sys import time @@ -142,6 +149,7 @@ raise ImportError("This module can only be loaded in IPython.") from google.api_core import client_info +from google.api_core.exceptions import NotFound import google.auth from google.cloud import bigquery from google.cloud.bigquery.dbapi import _helpers @@ -266,6 +274,31 @@ def default_query_job_config(self, value): context = Context() +def _handle_error(error, destination_var=None): + """Process a query execution error. + + Args: + error (Exception): + An exception that ocurred during the query exectution. + destination_var (Optional[str]): + The name of the IPython session variable to store the query job. 
+ """ + if destination_var: + query_job = getattr(error, "query_job", None) + + if query_job is not None: + IPython.get_ipython().push({destination_var: query_job}) + else: + # this is the case when previewing table rows by providing just + # table ID to cell magic + print( + "Could not save output to variable '{}'.".format(destination_var), + file=sys.stderr, + ) + + print("\nERROR:\n", str(error), file=sys.stderr) + + def _run_query(client, query, job_config=None): """Runs a query while printing status updates @@ -308,18 +341,58 @@ def _run_query(client, query, job_config=None): return query_job +def _create_dataset_if_necessary(client, dataset_id): + """Create a dataset in the current project if it doesn't exist. + + Args: + client (google.cloud.bigquery.client.Client): + Client to bundle configuration needed for API requests. + dataset_id (str): + Dataset id. + """ + dataset_reference = bigquery.dataset.DatasetReference(client.project, dataset_id) + try: + dataset = client.get_dataset(dataset_reference) + return + except NotFound: + pass + dataset = bigquery.Dataset(dataset_reference) + dataset.location = client.location + print("Creating dataset: {}".format(dataset_id)) + dataset = client.create_dataset(dataset) + + @magic_arguments.magic_arguments() @magic_arguments.argument( "destination_var", nargs="?", help=("If provided, save the output to this variable instead of displaying it."), ) +@magic_arguments.argument( + "--destination_table", + type=str, + default=None, + help=( + "If provided, save the output of the query to a new BigQuery table. " + "Variable should be in a format .. " + "If table does not exists, it will be created. " + "If table already exists, its data will be overwritten." + ), +) @magic_arguments.argument( "--project", type=str, default=None, help=("Project to use for executing this query. 
Defaults to the context project."), ) +@magic_arguments.argument( + "--max_results", + default=None, + help=( + "Maximum number of rows in dataframe returned from executing the query." + "Defaults to returning all rows." + ), +) @magic_arguments.argument( "--maximum_bytes_billed", default=None, @@ -420,35 +493,65 @@ def _cell_magic(line, query): bqstorage_client = _make_bqstorage_client( args.use_bqstorage_api or context.use_bqstorage_api, context.credentials ) + + if args.max_results: + max_results = int(args.max_results) + else: + max_results = None + + query = query.strip() + + # Any query that does not contain whitespace (aside from leading and trailing whitespace) + # is assumed to be a table id + if not re.search(r"\s", query): + try: + rows = client.list_rows(query, max_results=max_results) + except Exception as ex: + _handle_error(ex, args.destination_var) + return + + result = rows.to_dataframe(bqstorage_client=bqstorage_client) + if args.destination_var: + IPython.get_ipython().push({args.destination_var: result}) + return + else: + return result + job_config = bigquery.job.QueryJobConfig() job_config.query_parameters = params job_config.use_legacy_sql = args.use_legacy_sql job_config.dry_run = args.dry_run + if args.destination_table: + split = args.destination_table.split(".") + if len(split) != 2: + raise ValueError( + "--destination_table should be in a . format." 
+ ) + dataset_id, table_id = split + job_config.allow_large_results = True + dataset_ref = client.dataset(dataset_id) + destination_table_ref = dataset_ref.table(table_id) + job_config.destination = destination_table_ref + job_config.create_disposition = "CREATE_IF_NEEDED" + job_config.write_disposition = "WRITE_TRUNCATE" + _create_dataset_if_necessary(client, dataset_id) + if args.maximum_bytes_billed == "None": job_config.maximum_bytes_billed = 0 elif args.maximum_bytes_billed is not None: value = int(args.maximum_bytes_billed) job_config.maximum_bytes_billed = value - error = None try: - query_job = _run_query(client, query, job_config) + query_job = _run_query(client, query, job_config=job_config) except Exception as ex: - error = str(ex) + _handle_error(ex, args.destination_var) + return if not args.verbose: display.clear_output() - if error: - if args.destination_var: - print( - "Could not save output to variable '{}'.".format(args.destination_var), - file=sys.stderr, - ) - print("\nERROR:\n", error, file=sys.stderr) - return - if args.dry_run and args.destination_var: IPython.get_ipython().push({args.destination_var: query_job}) return @@ -460,7 +563,13 @@ def _cell_magic(line, query): ) return query_job - result = query_job.to_dataframe(bqstorage_client=bqstorage_client) + if max_results: + result = query_job.result(max_results=max_results).to_dataframe( + bqstorage_client=bqstorage_client + ) + else: + result = query_job.to_dataframe(bqstorage_client=bqstorage_client) + if args.destination_var: IPython.get_ipython().push({args.destination_var: result}) else: diff --git a/bigquery/google/cloud/bigquery/model.py b/bigquery/google/cloud/bigquery/model.py index 4049a9232467..7bad752ea658 100644 --- a/bigquery/google/cloud/bigquery/model.py +++ b/bigquery/google/cloud/bigquery/model.py @@ -25,6 +25,7 @@ from google.api_core import datetime_helpers from google.cloud.bigquery import _helpers from google.cloud.bigquery_v2 import types +from 
google.cloud.bigquery.encryption_configuration import EncryptionConfiguration class Model(object): @@ -34,10 +35,7 @@ class Model(object): https://cloud.google.com/bigquery/docs/reference/rest/v2/models Args: - model_ref (Union[ \ - :class:`~google.cloud.bigquery.model.ModelReference`, \ - str, \ - ]): + model_ref (Union[google.cloud.bigquery.model.ModelReference, str]): A pointer to a model. If ``model_ref`` is a string, it must included a project ID, dataset ID, and model ID, each separated by ``.``. @@ -51,6 +49,7 @@ class Model(object): # have an exhaustive list of all mutable properties. "labels": "labels", "description": "description", + "encryption_configuration": "encryptionConfiguration", } def __init__(self, model_ref): @@ -256,6 +255,30 @@ def labels(self, value): value = {} self._properties["labels"] = value + @property + def encryption_configuration(self): + """google.cloud.bigquery.encryption_configuration.EncryptionConfiguration: Custom + encryption configuration for the model. + + Custom encryption configuration (e.g., Cloud KMS keys) or :data:`None` + if using default encryption. + + See `protecting data with Cloud KMS keys + `_ + in the BigQuery documentation. + """ + prop = self._properties.get("encryptionConfiguration") + if prop: + prop = EncryptionConfiguration.from_api_repr(prop) + return prop + + @encryption_configuration.setter + def encryption_configuration(self, value): + api_repr = value + if value: + api_repr = value.to_api_repr() + self._properties["encryptionConfiguration"] = api_repr + @classmethod def from_api_repr(cls, resource): """Factory: construct a model resource given its API representation @@ -299,7 +322,7 @@ class ModelReference(object): """ModelReferences are pointers to models. 
See - https://cloud.google.com/bigquery/docs/reference/rest/v2/models + https://cloud.google.com/bigquery/docs/reference/rest/v2/models#modelreference """ def __init__(self): diff --git a/bigquery/google/cloud/bigquery/query.py b/bigquery/google/cloud/bigquery/query.py index 4039be33db8c..925f3e29d298 100644 --- a/bigquery/google/cloud/bigquery/query.py +++ b/bigquery/google/cloud/bigquery/query.py @@ -26,11 +26,10 @@ class UDFResource(object): """Describe a single user-defined function (UDF) resource. - :type udf_type: str - :param udf_type: the type of the resource ('inlineCode' or 'resourceUri') + Args: + udf_type (str): the type of the resource ('inlineCode' or 'resourceUri') - :type value: str - :param value: the inline code or resource URI. + value (str): the inline code or resource URI. See https://cloud.google.com/bigquery/user-defined-functions#api @@ -57,17 +56,19 @@ class _AbstractQueryParameter(object): def from_api_repr(cls, resource): """Factory: construct parameter from JSON resource. - :type resource: dict - :param resource: JSON mapping of parameter + Args: + resource (Dict): JSON mapping of parameter - :rtype: :class:`~google.cloud.bigquery.query.ScalarQueryParameter` + Returns: + google.cloud.bigquery.query.ScalarQueryParameter """ raise NotImplementedError def to_api_repr(self): """Construct JSON API representation for the parameter. - :rtype: dict + Returns: + Dict: JSON representation for the parameter. """ raise NotImplementedError @@ -75,18 +76,18 @@ def to_api_repr(self): class ScalarQueryParameter(_AbstractQueryParameter): """Named / positional query parameters for scalar values. - :type name: str or None - :param name: Parameter name, used via ``@foo`` syntax. If None, the - parameter can only be addressed via position (``?``). + Args: + name (Optional[str]): + Parameter name, used via ``@foo`` syntax. If None, the + parameter can only be addressed via position (``?``). - :type type_: str - :param type_: name of parameter type. 
One of 'STRING', 'INT64', - 'FLOAT64', 'NUMERIC', 'BOOL', 'TIMESTAMP', 'DATETIME', or - 'DATE'. + type_ (str): + name of parameter type. One of 'STRING', 'INT64', + 'FLOAT64', 'NUMERIC', 'BOOL', 'TIMESTAMP', 'DATETIME', or + 'DATE'. - :type value: str, int, float, :class:`decimal.Decimal`, bool, - :class:`datetime.datetime`, or :class:`datetime.date`. - :param value: the scalar parameter value. + value (Union[str, int, float, decimal.Decimal, bool, + datetime.datetime, datetime.date]): the scalar parameter value. """ def __init__(self, name, type_, value): @@ -98,19 +99,18 @@ def __init__(self, name, type_, value): def positional(cls, type_, value): """Factory for positional paramater. - :type type_: str - :param type_: - name of parameter type. One of 'STRING', 'INT64', - 'FLOAT64', 'NUMERIC', 'BOOL', 'TIMESTAMP', 'DATETIME', or - 'DATE'. + Args: + type_ (str): + name of parameter type. One of 'STRING', 'INT64', + 'FLOAT64', 'NUMERIC', 'BOOL', 'TIMESTAMP', 'DATETIME', or + 'DATE'. - :type value: str, int, float, :class:`decimal.Decimal`, bool, - :class:`datetime.datetime`, or - :class:`datetime.date`. - :param value: the scalar parameter value. + value (Union[str, int, float, decimal.Decimal, bool, + datetime.datetime, + datetime.date]): the scalar parameter value. - :rtype: :class:`~google.cloud.bigquery.query.ScalarQueryParameter` - :returns: instance without name + Returns: + google.cloud.bigquery.query.ScalarQueryParameter: instance without name """ return cls(None, type_, value) @@ -118,11 +118,11 @@ def positional(cls, type_, value): def from_api_repr(cls, resource): """Factory: construct parameter from JSON resource. 
- :type resource: dict - :param resource: JSON mapping of parameter + Args: + resource (Dict): JSON mapping of parameter - :rtype: :class:`~google.cloud.bigquery.query.ScalarQueryParameter` - :returns: instance + Returns: + google.cloud.bigquery.query.ScalarQueryParameter: instance """ name = resource.get("name") type_ = resource["parameterType"]["type"] @@ -140,8 +140,8 @@ def from_api_repr(cls, resource): def to_api_repr(self): """Construct JSON API representation for the parameter. - :rtype: dict - :returns: JSON mapping + Returns: + Dict: JSON mapping """ value = self.value converter = _SCALAR_VALUE_TO_JSON_PARAM.get(self.type_) @@ -161,8 +161,7 @@ def _key(self): Used to compute this instance's hashcode and evaluate equality. Returns: - tuple: The contents of this - :class:`~google.cloud.bigquery.query.ScalarQueryParameter`. + Tuple: The contents of this :class:`~google.cloud.bigquery.query.ScalarQueryParameter`. """ return (self.name, self.type_.upper(), self.value) @@ -181,17 +180,16 @@ def __repr__(self): class ArrayQueryParameter(_AbstractQueryParameter): """Named / positional query parameters for array values. - :type name: str or None - :param name: Parameter name, used via ``@foo`` syntax. If None, the - parameter can only be addressed via position (``?``). + Args: + name (Optional[str]): + Parameter name, used via ``@foo`` syntax. If None, the + parameter can only be addressed via position (``?``). - :type array_type: str - :param array_type: - name of type of array elements. One of `'STRING'`, `'INT64'`, - `'FLOAT64'`, `'NUMERIC'`, `'BOOL'`, `'TIMESTAMP'`, or `'DATE'`. + array_type (str): + name of type of array elements. One of `'STRING'`, `'INT64'`, + `'FLOAT64'`, `'NUMERIC'`, `'BOOL'`, `'TIMESTAMP'`, or `'DATE'`. - :type values: list of appropriate scalar type. - :param values: the parameter array values. + values (List[appropriate scalar type]): the parameter array values. 
""" def __init__(self, name, array_type, values): @@ -203,16 +201,15 @@ def __init__(self, name, array_type, values): def positional(cls, array_type, values): """Factory for positional parameters. - :type array_type: str - :param array_type: - name of type of array elements. One of `'STRING'`, `'INT64'`, - `'FLOAT64'`, `'NUMERIC'`, `'BOOL'`, `'TIMESTAMP'`, or `'DATE'`. + Args: + array_type (str): + name of type of array elements. One of `'STRING'`, `'INT64'`, + `'FLOAT64'`, `'NUMERIC'`, `'BOOL'`, `'TIMESTAMP'`, or `'DATE'`. - :type values: list of appropriate scalar type - :param values: the parameter array values. + values (List[appropriate scalar type]): the parameter array values. - :rtype: :class:`~google.cloud.bigquery.query.ArrayQueryParameter` - :returns: instance without name + Returns: + google.cloud.bigquery.query.ArrayQueryParameter: instance without name """ return cls(None, array_type, values) @@ -249,11 +246,11 @@ def _from_api_repr_scalar(cls, resource): def from_api_repr(cls, resource): """Factory: construct parameter from JSON resource. - :type resource: dict - :param resource: JSON mapping of parameter + Args: + resource (Dict): JSON mapping of parameter - :rtype: :class:`~google.cloud.bigquery.query.ArrayQueryParameter` - :returns: instance + Returns: + google.cloud.bigquery.query.ArrayQueryParameter: instance """ array_type = resource["parameterType"]["arrayType"]["type"] if array_type == "STRUCT": @@ -263,8 +260,8 @@ def from_api_repr(cls, resource): def to_api_repr(self): """Construct JSON API representation for the parameter. - :rtype: dict - :returns: JSON mapping + Returns: + Dict: JSON mapping """ values = self.values if self.array_type == "RECORD" or self.array_type == "STRUCT": @@ -291,8 +288,7 @@ def _key(self): Used to compute this instance's hashcode and evaluate equality. Returns: - tuple: The contents of this - :class:`~google.cloud.bigquery.query.ArrayQueryParameter`. 
+ Tuple: The contents of this :class:`~google.cloud.bigquery.query.ArrayQueryParameter`. """ return (self.name, self.array_type.upper(), self.values) @@ -311,15 +307,16 @@ def __repr__(self): class StructQueryParameter(_AbstractQueryParameter): """Named / positional query parameters for struct values. - :type name: str or None - :param name: Parameter name, used via ``@foo`` syntax. If None, the - parameter can only be addressed via position (``?``). + Args: + name (Optional[str]): + Parameter name, used via ``@foo`` syntax. If None, the + parameter can only be addressed via position (``?``). - :type sub_params: - tuple of :class:`~google.cloud.bigquery.query.ScalarQueryParameter`, - :class:`~google.cloud.bigquery.query.ArrayQueryParameter`, or - :class:`~google.cloud.bigquery.query.StructQueryParameter` - :param sub_params: the sub-parameters for the struct + sub_params (Union[Tuple[ + google.cloud.bigquery.query.ScalarQueryParameter, + google.cloud.bigquery.query.ArrayQueryParameter, + google.cloud.bigquery.query.StructQueryParameter + ]]): the sub-parameters for the struct """ def __init__(self, name, *sub_params): @@ -341,15 +338,15 @@ def __init__(self, name, *sub_params): def positional(cls, *sub_params): """Factory for positional parameters. 
- :type sub_params: - tuple of - :class:`~google.cloud.bigquery.query.ScalarQueryParameter`, - :class:`~google.cloud.bigquery.query.ArrayQueryParameter`, or - :class:`~google.cloud.bigquery.query.StructQueryParameter` - :param sub_params: the sub-parameters for the struct + Args: + sub_params (Union[Tuple[ + google.cloud.bigquery.query.ScalarQueryParameter, + google.cloud.bigquery.query.ArrayQueryParameter, + google.cloud.bigquery.query.StructQueryParameter + ]]): the sub-parameters for the struct - :rtype: :class:`~google.cloud.bigquery.query.StructQueryParameter` - :returns: instance without name + Returns: + google.cloud.bigquery.query.StructQueryParameter: instance without name """ return cls(None, *sub_params) @@ -357,11 +354,11 @@ def positional(cls, *sub_params): def from_api_repr(cls, resource): """Factory: construct parameter from JSON resource. - :type resource: dict - :param resource: JSON mapping of parameter + Args: + resource (Dict): JSON mapping of parameter - :rtype: :class:`~google.cloud.bigquery.query.StructQueryParameter` - :returns: instance + Returns: + google.cloud.bigquery.query.StructQueryParameter: instance """ name = resource.get("name") instance = cls(name) @@ -397,8 +394,8 @@ def from_api_repr(cls, resource): def to_api_repr(self): """Construct JSON API representation for the parameter. - :rtype: dict - :returns: JSON mapping + Returns: + Dict: JSON mapping """ s_types = {} values = {} @@ -432,8 +429,7 @@ def _key(self): Used to compute this instance's hashcode and evaluate equality. Returns: - tuple: The contents of this - :class:`~google.cloud.biquery.ArrayQueryParameter`. + Tuple: The contents of this :class:`~google.cloud.biquery.ArrayQueryParameter`. """ return (self.name, self.struct_types, self.struct_values) @@ -468,8 +464,8 @@ def from_api_repr(cls, api_response): def project(self): """Project bound to the query job. - :rtype: str - :returns: the project that the query job is associated with. 
+ Returns: + str: The project that the query job is associated with. """ return self._properties.get("jobReference", {}).get("projectId") @@ -478,11 +474,12 @@ def cache_hit(self): """Query results served from cache. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#cacheHit + https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#body.QueryResponse.FIELDS.cache_hit - :rtype: bool or ``NoneType`` - :returns: True if the query results were served from cache (None - until set by the server). + Returns: + Optional[bool]: + True if the query results were served from cache (None + until set by the server). """ return self._properties.get("cacheHit") @@ -491,11 +488,12 @@ def complete(self): """Server completed query. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#jobComplete + https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#body.QueryResponse.FIELDS.job_complete - :rtype: bool or ``NoneType`` - :returns: True if the query completed on the server (None - until set by the server). + Returns: + Optional[bool]: + True if the query completed on the server (None + until set by the server). """ return self._properties.get("jobComplete") @@ -504,11 +502,12 @@ def errors(self): """Errors generated by the query. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#errors + https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#body.QueryResponse.FIELDS.errors - :rtype: list of mapping, or ``NoneType`` - :returns: Mappings describing errors generated on the server (None - until set by the server). + Returns: + Optional[List[Mapping]]: + Mappings describing errors generated on the server (None + until set by the server). """ return self._properties.get("errors") @@ -517,10 +516,10 @@ def job_id(self): """Job ID of the query job these results are from. 
See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#jobReference + https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#body.QueryResponse.FIELDS.job_reference - :rtype: string - :returns: Job ID of the query job. + Returns: + str: Job ID of the query job. """ return self._properties.get("jobReference", {}).get("jobId") @@ -529,10 +528,10 @@ def page_token(self): """Token for fetching next bach of results. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#pageToken + https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#body.QueryResponse.FIELDS.page_token - :rtype: str, or ``NoneType`` - :returns: Token generated on the server (None until set by the server). + Returns: + Optional[str]: Token generated on the server (None until set by the server). """ return self._properties.get("pageToken") @@ -541,10 +540,10 @@ def total_rows(self): """Total number of rows returned by the query. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#totalRows + https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#body.QueryResponse.FIELDS.total_rows - :rtype: int, or ``NoneType`` - :returns: Count generated on the server (None until set by the server). + Returns: + Optional[int]: Count generated on the server (None until set by the server). """ total_rows = self._properties.get("totalRows") if total_rows is not None: @@ -555,10 +554,10 @@ def total_bytes_processed(self): """Total number of bytes processed by the query. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#totalBytesProcessed + https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#body.QueryResponse.FIELDS.total_bytes_processed - :rtype: int, or ``NoneType`` - :returns: Count generated on the server (None until set by the server). + Returns: + Optional[int]: Count generated on the server (None until set by the server). 
""" total_bytes_processed = self._properties.get("totalBytesProcessed") if total_bytes_processed is not None: @@ -569,10 +568,10 @@ def num_dml_affected_rows(self): """Total number of rows affected by a DML query. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#numDmlAffectedRows + https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#body.QueryResponse.FIELDS.num_dml_affected_rows - :rtype: int, or ``NoneType`` - :returns: Count generated on the server (None until set by the server). + Returns: + Optional[int]: Count generated on the server (None until set by the server). """ num_dml_affected_rows = self._properties.get("numDmlAffectedRows") if num_dml_affected_rows is not None: @@ -583,10 +582,11 @@ def rows(self): """Query results. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#rows + https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#body.QueryResponse.FIELDS.rows - :rtype: list of :class:`~google.cloud.bigquery.table.Row` - :returns: fields describing the schema (None until set by the server). + Returns: + Optional[List[google.cloud.bigquery.table.Row]]: + Fields describing the schema (None until set by the server). """ return _rows_from_json(self._properties.get("rows", ()), self.schema) @@ -595,18 +595,19 @@ def schema(self): """Schema for query results. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#schema + https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#body.QueryResponse.FIELDS.schema - :rtype: list of :class:`SchemaField`, or ``NoneType`` - :returns: fields describing the schema (None until set by the server). + Returns: + Optional[List[SchemaField]]: + Fields describing the schema (None until set by the server). 
""" return _parse_schema_resource(self._properties.get("schema", {})) def _set_properties(self, api_response): """Update properties from resource in body of ``api_response`` - :type api_response: dict - :param api_response: response returned from an API call + Args: + api_response (Dict): response returned from an API call """ job_id_present = ( "jobReference" in api_response diff --git a/bigquery/google/cloud/bigquery/routine.py b/bigquery/google/cloud/bigquery/routine.py index d5bb752dfddb..044368e75108 100644 --- a/bigquery/google/cloud/bigquery/routine.py +++ b/bigquery/google/cloud/bigquery/routine.py @@ -31,10 +31,7 @@ class Routine(object): https://cloud.google.com/bigquery/docs/reference/rest/v2/routines Args: - routine_ref (Union[ \ - str, \ - google.cloud.bigquery.routine.RoutineReference, \ - ]): + routine_ref (Union[str, google.cloud.bigquery.routine.RoutineReference]): A pointer to a routine. If ``routine_ref`` is a string, it must included a project ID, dataset ID, and routine ID, each separated by ``.``. @@ -186,7 +183,7 @@ def return_type(self): time. See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/routines#resource-routine + https://cloud.google.com/bigquery/docs/reference/rest/v2/routines#Routine.FIELDS.return_type """ resource = self._properties.get(self._PROPERTY_TO_API_FIELD["return_type"]) if not resource: @@ -262,8 +259,7 @@ def to_api_repr(self): """Construct the API resource representation of this routine. Returns: - Dict[str, object]: - Routine represented as an API resource. + Dict[str, object]: Routine represented as an API resource. """ return self._properties @@ -281,7 +277,7 @@ class RoutineArgument(object): """Input/output argument of a function or a stored procedure. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/routines + https://cloud.google.com/bigquery/docs/reference/rest/v2/routines#argument Args: ``**kwargs`` (Dict): @@ -321,7 +317,7 @@ def kind(self): ``ANY_TYPE``. 
See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/routines#ArgumentKind + https://cloud.google.com/bigquery/docs/reference/rest/v2/routines#Argument.FIELDS.argument_kind """ return self._properties.get(self._PROPERTY_TO_API_FIELD["kind"]) @@ -344,7 +340,7 @@ def data_type(self): of a variable, e.g., a function argument. See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/StandardSqlDataType + https://cloud.google.com/bigquery/docs/reference/rest/v2/routines#Argument.FIELDS.data_type """ resource = self._properties.get(self._PROPERTY_TO_API_FIELD["data_type"]) if not resource: @@ -366,8 +362,7 @@ def from_api_repr(cls, resource): """Factory: construct a routine argument given its API representation. Args: - resource (Dict[str, object]): - Resource, as returned from the API. + resource (Dict[str, object]): Resource, as returned from the API. Returns: google.cloud.bigquery.routine.RoutineArgument: @@ -381,8 +376,7 @@ def to_api_repr(self): """Construct the API resource representation of this routine argument. Returns: - Dict[str, object]: - Routine argument represented as an API resource. + Dict[str, object]: Routine argument represented as an API resource. """ return self._properties @@ -406,7 +400,7 @@ class RoutineReference(object): """A pointer to a routine. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/routines + https://cloud.google.com/bigquery/docs/reference/rest/v2/routines#routinereference """ def __init__(self): @@ -485,8 +479,7 @@ def to_api_repr(self): """Construct the API resource representation of this routine reference. Returns: - Dict[str, object]: - Routine reference represented as an API resource. + Dict[str, object]: Routine reference represented as an API resource. 
""" return self._properties diff --git a/bigquery/google/cloud/bigquery/schema.py b/bigquery/google/cloud/bigquery/schema.py index e0673d85baf6..d766cb542608 100644 --- a/bigquery/google/cloud/bigquery/schema.py +++ b/bigquery/google/cloud/bigquery/schema.py @@ -14,6 +14,8 @@ """Schemas for BigQuery tables / queries.""" +import collections + from google.cloud.bigquery_v2 import types @@ -51,14 +53,14 @@ class SchemaField(object): name (str): the name of the field. field_type (str): the type of the field. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#schema.fields.type + https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#TableFieldSchema.FIELDS.type mode (str): the mode of the field. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#schema.fields.mode + https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#TableFieldSchema.FIELDS.mode - description (Optional[str]):description for the field. + description (Optional[str]): description for the field. - fields (Tuple[:class:`~google.cloud.bigquery.schema.SchemaField`]): + fields (Tuple[google.cloud.bigquery.schema.SchemaField]): subfields (requires ``field_type`` of 'RECORD'). """ @@ -79,8 +81,7 @@ def from_api_repr(cls, api_repr): :meth:`to_api_repr`. Returns: - google.cloud.biquery.schema.SchemaField: - The ``SchemaField`` object. + google.cloud.biquery.schema.SchemaField: The ``SchemaField`` object. """ # Handle optional properties with default values mode = api_repr.get("mode", "NULLABLE") @@ -104,7 +105,7 @@ def field_type(self): """str: The type of the field. See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#schema.fields.type + https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#TableFieldSchema.FIELDS.type """ return self._field_type @@ -113,7 +114,7 @@ def mode(self): """str: The mode of the field. 
See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#schema.fields.mode + https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#TableFieldSchema.FIELDS.mode """ return self._mode @@ -139,8 +140,7 @@ def to_api_repr(self): """Return a dictionary representing this schema field. Returns: - dict: A dictionary representing the SchemaField in a serialized - form. + Dict: A dictionary representing the SchemaField in a serialized form. """ # Put together the basic representation. See http://bit.ly/2hOAT5u. answer = { @@ -164,8 +164,7 @@ def _key(self): Used to compute this instance's hashcode and evaluate equality. Returns: - tuple: The contents of this - :class:`~google.cloud.bigquery.schema.SchemaField`. + Tuple: The contents of this :class:`~google.cloud.bigquery.schema.SchemaField`. """ return ( self._name, @@ -229,11 +228,11 @@ def _parse_schema_resource(info): """Parse a resource fragment into a schema field. Args: - info: (Mapping[str->dict]): should contain a "fields" key to be parsed + info (Mapping[str, Dict]): should contain a "fields" key to be parsed Returns: - (Union[Sequence[:class:`google.cloud.bigquery.schema.SchemaField`],None]) - a list of parsed fields, or ``None`` if no "fields" key found. + Optional[Sequence[google.cloud.bigquery.schema.SchemaField]]: + A list of parsed fields, or ``None`` if no "fields" key found. """ if "fields" not in info: return () @@ -253,10 +252,42 @@ def _build_schema_resource(fields): """Generate a resource fragment for a schema. Args: - fields [Sequence[:class:`~google.cloud.bigquery.schema.SchemaField`]): - schema to be dumped + fields (Sequence[google.cloud.bigquery.schema.SchemaField]): schema to be dumped. - Returns: (Sequence[dict]) - mappings describing the schema of the supplied fields. + Returns: + Sequence[Dict]: Mappings describing the schema of the supplied fields. 
""" return [field.to_api_repr() for field in fields] + + +def _to_schema_fields(schema): + """Coerce `schema` to a list of schema field instances. + + Args: + schema(Sequence[Union[ \ + :class:`~google.cloud.bigquery.schema.SchemaField`, \ + Mapping[str, Any] \ + ]]): + Table schema to convert. If some items are passed as mappings, + their content must be compatible with + :meth:`~google.cloud.bigquery.schema.SchemaField.from_api_repr`. + + Returns: + Sequence[:class:`~google.cloud.bigquery.schema.SchemaField`] + + Raises: + Exception: If ``schema`` is not a sequence, or if any item in the + sequence is not a :class:`~google.cloud.bigquery.schema.SchemaField` + instance or a compatible mapping representation of the field. + """ + for field in schema: + if not isinstance(field, (SchemaField, collections.Mapping)): + raise ValueError( + "Schema items must either be fields or compatible " + "mapping representations." + ) + + return [ + field if isinstance(field, SchemaField) else SchemaField.from_api_repr(field) + for field in schema + ] diff --git a/bigquery/google/cloud/bigquery/table.py b/bigquery/google/cloud/bigquery/table.py index 62072cf88804..2f2ee50cc89e 100644 --- a/bigquery/google/cloud/bigquery/table.py +++ b/bigquery/google/cloud/bigquery/table.py @@ -51,10 +51,11 @@ import google.cloud._helpers from google.cloud.bigquery import _helpers from google.cloud.bigquery import _pandas_helpers -from google.cloud.bigquery.schema import SchemaField from google.cloud.bigquery.schema import _build_schema_resource from google.cloud.bigquery.schema import _parse_schema_resource +from google.cloud.bigquery.schema import _to_schema_fields from google.cloud.bigquery.external_config import ExternalConfig +from google.cloud.bigquery.encryption_configuration import EncryptionConfiguration _LOGGER = logging.getLogger(__name__) @@ -113,78 +114,11 @@ def _view_use_legacy_sql_getter(table): return True -class EncryptionConfiguration(object): - """Custom encryption configuration 
(e.g., Cloud KMS keys). - - Args: - kms_key_name (str): resource ID of Cloud KMS key used for encryption - """ - - def __init__(self, kms_key_name=None): - self._properties = {} - if kms_key_name is not None: - self._properties["kmsKeyName"] = kms_key_name - - @property - def kms_key_name(self): - """str: Resource ID of Cloud KMS key - - Resource ID of Cloud KMS key or :data:`None` if using default - encryption. - """ - return self._properties.get("kmsKeyName") - - @kms_key_name.setter - def kms_key_name(self, value): - self._properties["kmsKeyName"] = value - - @classmethod - def from_api_repr(cls, resource): - """Construct an encryption configuration from its API representation - - Args: - resource (Dict[str, object]): - An encryption configuration representation as returned from - the API. - - Returns: - google.cloud.bigquery.table.EncryptionConfiguration: - An encryption configuration parsed from ``resource``. - """ - config = cls() - config._properties = copy.deepcopy(resource) - return config - - def to_api_repr(self): - """Construct the API resource representation of this encryption - configuration. - - Returns: - Dict[str, object]: - Encryption configuration as represented as an API resource - """ - return copy.deepcopy(self._properties) - - def __eq__(self, other): - if not isinstance(other, EncryptionConfiguration): - return NotImplemented - return self.kms_key_name == other.kms_key_name - - def __ne__(self, other): - return not self == other - - def __hash__(self): - return hash(self.kms_key_name) - - def __repr__(self): - return "EncryptionConfiguration({})".format(self.kms_key_name) - - class TableReference(object): """TableReferences are pointers to tables. 
See - https://cloud.google.com/bigquery/docs/reference/rest/v2/tables + https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#tablereference Args: dataset_ref (google.cloud.bigquery.dataset.DatasetReference): @@ -364,18 +298,20 @@ class Table(object): """Tables represent a set of rows whose values correspond to a schema. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/tables + https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#resource-table Args: - table_ref (Union[ \ - :class:`~google.cloud.bigquery.table.TableReference`, \ - str, \ - ]): + table_ref (Union[google.cloud.bigquery.table.TableReference, str]): A pointer to a table. If ``table_ref`` is a string, it must included a project ID, dataset ID, and table ID, each separated by ``.``. - schema (List[google.cloud.bigquery.schema.SchemaField]): - The table's schema + schema (Optional[Sequence[Union[ \ + :class:`~google.cloud.bigquery.schema.SchemaField`, \ + Mapping[str, Any] \ + ]]]): + The table's schema. If any item is a mapping, its content must be + compatible with + :meth:`~google.cloud.bigquery.schema.SchemaField.from_api_repr`. """ _PROPERTY_TO_API_FIELD = { @@ -388,6 +324,7 @@ class Table(object): "view_query": "view", "external_data_configuration": "externalDataConfiguration", "encryption_configuration": "encryptionConfiguration", + "require_partition_filter": "requirePartitionFilter", } def __init__(self, table_ref, schema=None): @@ -423,15 +360,31 @@ def path(self): self.table_id, ) + @property + def require_partition_filter(self): + """bool: If set to true, queries over the partitioned table require a + partition filter that can be used for partition elimination to be + specified. 
+ """ + return self._properties.get("requirePartitionFilter") + + @require_partition_filter.setter + def require_partition_filter(self, value): + self._properties["requirePartitionFilter"] = value + @property def schema(self): - """List[google.cloud.bigquery.schema.SchemaField]: Table's schema. + """Sequence[Union[ \ + :class:`~google.cloud.bigquery.schema.SchemaField`, \ + Mapping[str, Any] \ + ]]: + Table's schema. Raises: - TypeError: If 'value' is not a sequence - ValueError: - If any item in the sequence is not a - :class:`~google.cloud.bigquery.schema.SchemaField` + Exception: + If ``schema`` is not a sequence, or if any item in the sequence + is not a :class:`~google.cloud.bigquery.schema.SchemaField` + instance or a compatible mapping representation of the field. """ prop = self._properties.get("schema") if not prop: @@ -443,9 +396,8 @@ def schema(self): def schema(self, value): if value is None: self._properties["schema"] = None - elif not all(isinstance(field, SchemaField) for field in value): - raise ValueError("Schema items must be fields") else: + value = _to_schema_fields(value) self._properties["schema"] = {"fields": _build_schema_resource(value)} @property @@ -469,7 +421,7 @@ def labels(self, value): @property def encryption_configuration(self): - """google.cloud.bigquery.table.EncryptionConfiguration: Custom + """google.cloud.bigquery.encryption_configuration.EncryptionConfiguration: Custom encryption configuration for the table. Custom encryption configuration (e.g., Cloud KMS keys) or :data:`None` @@ -561,14 +513,54 @@ def table_type(self): """ return self._properties.get("type") + @property + def range_partitioning(self): + """Optional[google.cloud.bigquery.table.RangePartitioning]: + Configures range-based partitioning for a table. + + .. note:: + **Beta**. The integer range partitioning feature is in a + pre-release state and might change or have limited support. 
+ + Only specify at most one of + :attr:`~google.cloud.bigquery.table.Table.time_partitioning` or + :attr:`~google.cloud.bigquery.table.Table.range_partitioning`. + + Raises: + ValueError: + If the value is not + :class:`~google.cloud.bigquery.table.RangePartitioning` or + :data:`None`. + """ + resource = self._properties.get("rangePartitioning") + if resource is not None: + return RangePartitioning(_properties=resource) + + @range_partitioning.setter + def range_partitioning(self, value): + resource = value + if isinstance(value, RangePartitioning): + resource = value._properties + elif value is not None: + raise ValueError( + "Expected value to be RangePartitioning or None, got {}.".format(value) + ) + self._properties["rangePartitioning"] = resource + @property def time_partitioning(self): - """google.cloud.bigquery.table.TimePartitioning: Configures time-based + """Optional[google.cloud.bigquery.table.TimePartitioning]: Configures time-based partitioning for a table. + Only specify at most one of + :attr:`~google.cloud.bigquery.table.Table.time_partitioning` or + :attr:`~google.cloud.bigquery.table.Table.range_partitioning`. + Raises: ValueError: - If the value is not :class:`TimePartitioning` or :data:`None`. + If the value is not + :class:`~google.cloud.bigquery.table.TimePartitioning` or + :data:`None`. """ prop = self._properties.get("timePartitioning") if prop is not None: @@ -1300,6 +1292,13 @@ class RowIterator(HTTPIterator): api_request (Callable[google.cloud._http.JSONConnection.api_request]): The function to use to make API requests. path (str): The method path to query for the list of items. + schema (Sequence[Union[ \ + :class:`~google.cloud.bigquery.schema.SchemaField`, \ + Mapping[str, Any] \ + ]]): + The table's schema. If any item is a mapping, its content must be + compatible with + :meth:`~google.cloud.bigquery.schema.SchemaField.from_api_repr`. page_token (str): A token identifying a page in a result set to start fetching results from. 
max_results (int, optional): The maximum number of results to fetch. @@ -1309,14 +1308,12 @@ class RowIterator(HTTPIterator): extra_params (Dict[str, object]): Extra query string parameters for the API call. table (Union[ \ - :class:`~google.cloud.bigquery.table.Table`, \ - :class:`~google.cloud.bigquery.table.TableReference`, \ + google.cloud.bigquery.table.Table, \ + google.cloud.bigquery.table.TableReference, \ ]): Optional. The table which these rows belong to, or a reference to it. Used to call the BigQuery Storage API to fetch rows. - selected_fields (Sequence[ \ - google.cloud.bigquery.schema.SchemaField, \ - ]): + selected_fields (Sequence[google.cloud.bigquery.schema.SchemaField]): Optional. A subset of columns to select from this table. """ @@ -1346,6 +1343,7 @@ def __init__( page_start=_rows_page_start, next_token="pageToken", ) + schema = _to_schema_fields(schema) self._field_to_index = _helpers._field_to_index_mapping(schema) self._page_size = page_size self._preserve_order = False @@ -1481,9 +1479,7 @@ def to_arrow(self, progress_bar_type=None, bqstorage_client=None): ``'tqdm_gui'`` Use the :func:`tqdm.tqdm_gui` function to display a progress bar as a graphical dialog box. - bqstorage_client ( \ - google.cloud.bigquery_storage_v1beta1.BigQueryStorageClient \ - ): + bqstorage_client (google.cloud.bigquery_storage_v1beta1.BigQueryStorageClient): **Beta Feature** Optional. A BigQuery Storage API client. If supplied, use the faster BigQuery Storage API to fetch rows from BigQuery. This API is a billable API. @@ -1501,8 +1497,7 @@ def to_arrow(self, progress_bar_type=None, bqstorage_client=None): from the destination table's schema. Raises: - ValueError: - If the :mod:`pyarrow` library cannot be imported. + ValueError: If the :mod:`pyarrow` library cannot be imported. 
..versionadded:: 1.17.0 """ @@ -1567,9 +1562,7 @@ def to_dataframe(self, bqstorage_client=None, dtypes=None, progress_bar_type=Non """Create a pandas DataFrame by loading all pages of a query. Args: - bqstorage_client ( \ - google.cloud.bigquery_storage_v1beta1.BigQueryStorageClient \ - ): + bqstorage_client (google.cloud.bigquery_storage_v1beta1.BigQueryStorageClient): **Beta Feature** Optional. A BigQuery Storage API client. If supplied, use the faster BigQuery Storage API to fetch rows from BigQuery. This API is a billable API. @@ -1584,9 +1577,7 @@ def to_dataframe(self, bqstorage_client=None, dtypes=None, progress_bar_type=Non query result tables with the BQ Storage API. When a problem is encountered reading a table, the tabledata.list method from the BigQuery API is used, instead. - dtypes ( \ - Map[str, Union[str, pandas.Series.dtype]] \ - ): + dtypes (Map[str, Union[str, pandas.Series.dtype]]): Optional. A dictionary of column names pandas ``dtype``s. The provided ``dtype`` is used when constructing the series for the column specified. Otherwise, the default pandas behavior @@ -1630,6 +1621,14 @@ def to_dataframe(self, bqstorage_client=None, dtypes=None, progress_bar_type=Non if dtypes is None: dtypes = {} + if bqstorage_client and self.max_results is not None: + warnings.warn( + "Cannot use bqstorage_client if max_results is set, " + "reverting to fetching data with the tabledata.list endpoint.", + stacklevel=2, + ) + bqstorage_client = None + progress_bar = self._get_progress_bar(progress_bar_type) frames = [] @@ -1672,12 +1671,10 @@ def to_arrow(self, progress_bar_type=None): """[Beta] Create an empty class:`pyarrow.Table`. Args: - progress_bar_type (Optional[str]): - Ignored. Added for compatibility with RowIterator. + progress_bar_type (Optional[str]): Ignored. Added for compatibility with RowIterator. Returns: - pyarrow.Table: - An empty :class:`pyarrow.Table`. + pyarrow.Table: An empty :class:`pyarrow.Table`. 
""" if pyarrow is None: raise ValueError(_NO_PYARROW_ERROR) @@ -1687,16 +1684,12 @@ def to_dataframe(self, bqstorage_client=None, dtypes=None, progress_bar_type=Non """Create an empty dataframe. Args: - bqstorage_client (Any): - Ignored. Added for compatibility with RowIterator. - dtypes (Any): - Ignored. Added for compatibility with RowIterator. - progress_bar_type (Any): - Ignored. Added for compatibility with RowIterator. + bqstorage_client (Any): Ignored. Added for compatibility with RowIterator. + dtypes (Any): Ignored. Added for compatibility with RowIterator. + progress_bar_type (Any): Ignored. Added for compatibility with RowIterator. Returns: - pandas.DataFrame: - An empty :class:`~pandas.DataFrame`. + pandas.DataFrame: An empty :class:`~pandas.DataFrame`. """ if pandas is None: raise ValueError(_NO_PANDAS_ERROR) @@ -1706,6 +1699,147 @@ def __iter__(self): return iter(()) +class PartitionRange(object): + """Definition of the ranges for range partitioning. + + .. note:: + **Beta**. The integer range partitioning feature is in a pre-release + state and might change or have limited support. + + Args: + start (Optional[int]): + Sets the + :attr:`~google.cloud.bigquery.table.PartitionRange.start` + property. + end (Optional[int]): + Sets the + :attr:`~google.cloud.bigquery.table.PartitionRange.end` + property. + interval (Optional[int]): + Sets the + :attr:`~google.cloud.bigquery.table.PartitionRange.interval` + property. + _properties (Optional[dict]): + Private. Used to construct object from API resource. 
+ """ + + def __init__(self, start=None, end=None, interval=None, _properties=None): + if _properties is None: + _properties = {} + self._properties = _properties + + if start is not None: + self.start = start + if end is not None: + self.end = end + if interval is not None: + self.interval = interval + + @property + def start(self): + """int: The start of range partitioning, inclusive.""" + return _helpers._int_or_none(self._properties.get("start")) + + @start.setter + def start(self, value): + self._properties["start"] = _helpers._str_or_none(value) + + @property + def end(self): + """int: The end of range partitioning, exclusive.""" + return _helpers._int_or_none(self._properties.get("end")) + + @end.setter + def end(self, value): + self._properties["end"] = _helpers._str_or_none(value) + + @property + def interval(self): + """int: The width of each interval.""" + return _helpers._int_or_none(self._properties.get("interval")) + + @interval.setter + def interval(self, value): + self._properties["interval"] = _helpers._str_or_none(value) + + def _key(self): + return tuple(sorted(self._properties.items())) + + def __repr__(self): + key_vals = ["{}={}".format(key, val) for key, val in self._key()] + return "PartitionRange({})".format(", ".join(key_vals)) + + +class RangePartitioning(object): + """Range-based partitioning configuration for a table. + + .. note:: + **Beta**. The integer range partitioning feature is in a pre-release + state and might change or have limited support. + + Args: + range_ (Optional[google.cloud.bigquery.table.PartitionRange]): + Sets the + :attr:`google.cloud.bigquery.table.RangePartitioning.range_` + property. + field (Optional[str]): + Sets the + :attr:`google.cloud.bigquery.table.RangePartitioning.field` + property. + _properties (Optional[dict]): + Private. Used to construct object from API resource. 
+ """ + + def __init__(self, range_=None, field=None, _properties=None): + if _properties is None: + _properties = {} + self._properties = _properties + + if range_ is not None: + self.range_ = range_ + if field is not None: + self.field = field + + # Trailing underscore to prevent conflict with built-in range() function. + @property + def range_(self): + """google.cloud.bigquery.table.PartitionRange: Defines the + ranges for range partitioning. + + Raises: + ValueError: + If the value is not a :class:`PartitionRange`. + """ + range_properties = self._properties.setdefault("range", {}) + return PartitionRange(_properties=range_properties) + + @range_.setter + def range_(self, value): + if not isinstance(value, PartitionRange): + raise ValueError("Expected a PartitionRange, but got {}.".format(value)) + self._properties["range"] = value._properties + + @property + def field(self): + """str: The table is partitioned by this field. + + The field must be a top-level ``NULLABLE`` / ``REQUIRED`` field. The + only supported type is ``INTEGER`` / ``INT64``. + """ + return self._properties.get("field") + + @field.setter + def field(self, value): + self._properties["field"] = value + + def _key(self): + return (("field", self.field), ("range_", self.range_)) + + def __repr__(self): + key_vals = ["{}={}".format(key, repr(val)) for key, val in self._key()] + return "RangePartitioning({})".format(", ".join(key_vals)) + + class TimePartitioningType(object): """Specifies the type of time partitioning to perform.""" @@ -1730,9 +1864,9 @@ class TimePartitioning(object): Number of milliseconds for which to keep the storage for a partition. require_partition_filter (bool, optional): - If set to true, queries over the partitioned table require a - partition filter that can be used for partition elimination to be - specified. + DEPRECATED: Use + :attr:`~google.cloud.bigquery.table.Table.require_partition_filter`, + instead. 
""" def __init__( @@ -1785,11 +1919,33 @@ def expiration_ms(self, value): @property def require_partition_filter(self): """bool: Specifies whether partition filters are required for queries + + DEPRECATED: Use + :attr:`~google.cloud.bigquery.table.Table.require_partition_filter`, + instead. """ + warnings.warn( + ( + "TimePartitioning.require_partition_filter will be removed in " + "future versions. Please use Table.require_partition_filter " + "instead." + ), + PendingDeprecationWarning, + stacklevel=2, + ) return self._properties.get("requirePartitionFilter") @require_partition_filter.setter def require_partition_filter(self, value): + warnings.warn( + ( + "TimePartitioning.require_partition_filter will be removed in " + "future versions. Please use Table.require_partition_filter " + "instead." + ), + PendingDeprecationWarning, + stacklevel=2, + ) self._properties["requirePartitionFilter"] = value @classmethod @@ -1864,14 +2020,12 @@ def _item_to_row(iterator, resource): added to the iterator after being created, which should be done by the caller. - :type iterator: :class:`~google.api_core.page_iterator.Iterator` - :param iterator: The iterator that is currently in use. - - :type resource: dict - :param resource: An item to be converted to a row. + Args: + iterator (google.api_core.page_iterator.Iterator): The iterator that is currently in use. + resource (Dict): An item to be converted to a row. - :rtype: :class:`~google.cloud.bigquery.table.Row` - :returns: The next row in the page. + Returns: + google.cloud.bigquery.table.Row: The next row in the page. """ return Row( _helpers._row_tuple_from_json(resource, iterator.schema), @@ -1902,14 +2056,10 @@ def get_column_data(field_index, field): def _rows_page_start(iterator, page, response): """Grab total rows when :class:`~google.cloud.iterator.Page` starts. - :type iterator: :class:`~google.api_core.page_iterator.Iterator` - :param iterator: The iterator that is currently in use. 
- - :type page: :class:`~google.api_core.page_iterator.Page` - :param page: The page that was just created. - - :type response: dict - :param response: The JSON API response for a page of rows in a table. + Args: + iterator (google.api_core.page_iterator.Iterator): The iterator that is currently in use. + page (google.api_core.page_iterator.Page): The page that was just created. + response (Dict): The JSON API response for a page of rows in a table. """ # Make a (lazy) copy of the page in column-oriented format for use in data # science packages. diff --git a/bigquery/google/cloud/bigquery_v2/proto/encryption_config.proto b/bigquery/google/cloud/bigquery_v2/proto/encryption_config.proto new file mode 100644 index 000000000000..54445f0fa770 --- /dev/null +++ b/bigquery/google/cloud/bigquery_v2/proto/encryption_config.proto @@ -0,0 +1,33 @@ +// Copyright 2019 Google LLC. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +syntax = "proto3"; + +package google.cloud.bigquery.v2; + +import "google/api/field_behavior.proto"; +import "google/protobuf/wrappers.proto"; +import "google/api/annotations.proto"; + +option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/v2;bigquery"; +option java_outer_classname = "EncryptionConfigProto"; +option java_package = "com.google.cloud.bigquery.v2"; + +message EncryptionConfiguration { + // Optional. Describes the Cloud KMS encryption key that will be used to + // protect destination BigQuery table. 
The BigQuery Service Account associated + // with your project requires access to this encryption key. + google.protobuf.StringValue kms_key_name = 1 [(google.api.field_behavior) = OPTIONAL]; +} diff --git a/bigquery/google/cloud/bigquery_v2/proto/encryption_config_pb2.py b/bigquery/google/cloud/bigquery_v2/proto/encryption_config_pb2.py new file mode 100644 index 000000000000..f7b26be5547f --- /dev/null +++ b/bigquery/google/cloud/bigquery_v2/proto/encryption_config_pb2.py @@ -0,0 +1,108 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/cloud/bigquery_v2/proto/encryption_config.proto + +import sys + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name="google/cloud/bigquery_v2/proto/encryption_config.proto", + package="google.cloud.bigquery.v2", + syntax="proto3", + serialized_options=_b( + "\n\034com.google.cloud.bigquery.v2B\025EncryptionConfigProtoZ@google.golang.org/genproto/googleapis/cloud/bigquery/v2;bigquery" + ), + serialized_pb=_b( + '\n6google/cloud/bigquery_v2/proto/encryption_config.proto\x12\x18google.cloud.bigquery.v2\x1a\x1fgoogle/api/field_behavior.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x1cgoogle/api/annotations.proto"R\n\x17\x45ncryptionConfiguration\x12\x37\n\x0ckms_key_name\x18\x01 
\x01(\x0b\x32\x1c.google.protobuf.StringValueB\x03\xe0\x41\x01\x42w\n\x1c\x63om.google.cloud.bigquery.v2B\x15\x45ncryptionConfigProtoZ@google.golang.org/genproto/googleapis/cloud/bigquery/v2;bigqueryb\x06proto3' + ), + dependencies=[ + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, + google_dot_protobuf_dot_wrappers__pb2.DESCRIPTOR, + google_dot_api_dot_annotations__pb2.DESCRIPTOR, + ], +) + + +_ENCRYPTIONCONFIGURATION = _descriptor.Descriptor( + name="EncryptionConfiguration", + full_name="google.cloud.bigquery.v2.EncryptionConfiguration", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="kms_key_name", + full_name="google.cloud.bigquery.v2.EncryptionConfiguration.kms_key_name", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=179, + serialized_end=261, +) + +_ENCRYPTIONCONFIGURATION.fields_by_name[ + "kms_key_name" +].message_type = google_dot_protobuf_dot_wrappers__pb2._STRINGVALUE +DESCRIPTOR.message_types_by_name["EncryptionConfiguration"] = _ENCRYPTIONCONFIGURATION +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +EncryptionConfiguration = _reflection.GeneratedProtocolMessageType( + "EncryptionConfiguration", + (_message.Message,), + dict( + DESCRIPTOR=_ENCRYPTIONCONFIGURATION, + __module__="google.cloud.bigquery_v2.proto.encryption_config_pb2", + __doc__="""Encryption configuration. + + Attributes: + kms_key_name: + Optional. Describes the Cloud KMS encryption key that will be + used to protect destination BigQuery table. 
The BigQuery + Service Account associated with your project requires access + to this encryption key. + """, + # @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.EncryptionConfiguration) + ), +) +_sym_db.RegisterMessage(EncryptionConfiguration) + + +DESCRIPTOR._options = None +_ENCRYPTIONCONFIGURATION.fields_by_name["kms_key_name"]._options = None +# @@protoc_insertion_point(module_scope) diff --git a/bigquery/google/cloud/bigquery_v2/proto/encryption_config_pb2_grpc.py b/bigquery/google/cloud/bigquery_v2/proto/encryption_config_pb2_grpc.py new file mode 100644 index 000000000000..07cb78fe03a9 --- /dev/null +++ b/bigquery/google/cloud/bigquery_v2/proto/encryption_config_pb2_grpc.py @@ -0,0 +1,2 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +import grpc diff --git a/bigquery/google/cloud/bigquery_v2/proto/model.proto b/bigquery/google/cloud/bigquery_v2/proto/model.proto index 42246e8efff2..13d980774413 100644 --- a/bigquery/google/cloud/bigquery_v2/proto/model.proto +++ b/bigquery/google/cloud/bigquery_v2/proto/model.proto @@ -17,13 +17,15 @@ syntax = "proto3"; package google.cloud.bigquery.v2; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/cloud/bigquery/v2/encryption_config.proto"; import "google/cloud/bigquery/v2/model_reference.proto"; import "google/cloud/bigquery/v2/standard_sql.proto"; import "google/protobuf/empty.proto"; import "google/protobuf/timestamp.proto"; import "google/protobuf/wrappers.proto"; import "google/api/annotations.proto"; -import "google/api/client.proto"; option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/v2;bigquery"; option java_outer_classname = "ModelProto"; @@ -39,19 +41,23 @@ service ModelService { // Gets the specified model resource by model ID. rpc GetModel(GetModelRequest) returns (Model) { + option (google.api.method_signature) = "project_id,dataset_id,model_id"; } // Lists all models in the specified dataset. 
Requires the READER dataset // role. rpc ListModels(ListModelsRequest) returns (ListModelsResponse) { + option (google.api.method_signature) = "project_id,dataset_id,max_results"; } // Patch specific fields in the specified model. rpc PatchModel(PatchModelRequest) returns (Model) { + option (google.api.method_signature) = "project_id,dataset_id,model_id,model"; } // Deletes the model specified by modelId from the dataset. rpc DeleteModel(DeleteModelRequest) returns (google.protobuf.Empty) { + option (google.api.method_signature) = "project_id,dataset_id,model_id"; } } @@ -236,7 +242,7 @@ message Model { // Counts of all categories for the categorical feature. If there are // more than ten categories, we return top ten (by count) and return - // one more CategoryCount with category ‘_OTHER_’ and count as + // one more CategoryCount with category "_OTHER_" and count as // aggregate counts of remaining categories. repeated CategoryCount category_counts = 1; } @@ -514,103 +520,105 @@ message Model { } // Output only. A hash of this resource. - string etag = 1; + string etag = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; // Required. Unique identifier for this model. - ModelReference model_reference = 2; + ModelReference model_reference = 2 [(google.api.field_behavior) = REQUIRED]; - // Output only. The time when this model was created, in millisecs since the - // epoch. - int64 creation_time = 5; + // Output only. The time when this model was created, in millisecs since the epoch. + int64 creation_time = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; - // Output only. The time when this model was last modified, in millisecs - // since the epoch. - int64 last_modified_time = 6; + // Output only. The time when this model was last modified, in millisecs since the epoch. + int64 last_modified_time = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; - // [Optional] A user-friendly description of this model. - string description = 12; + // Optional. 
A user-friendly description of this model. + string description = 12 [(google.api.field_behavior) = OPTIONAL]; - // [Optional] A descriptive name for this model. - string friendly_name = 14; + // Optional. A descriptive name for this model. + string friendly_name = 14 [(google.api.field_behavior) = OPTIONAL]; - // [Optional] The labels associated with this model. You can use these to - // organize and group your models. Label keys and values can be no longer + // The labels associated with this model. You can use these to organize + // and group your models. Label keys and values can be no longer // than 63 characters, can only contain lowercase letters, numeric // characters, underscores and dashes. International characters are allowed. // Label values are optional. Label keys must start with a letter and each // label in the list must have a different key. map labels = 15; - // [Optional] The time when this model expires, in milliseconds since the - // epoch. If not present, the model will persist indefinitely. Expired models + // Optional. The time when this model expires, in milliseconds since the epoch. + // If not present, the model will persist indefinitely. Expired models // will be deleted and their storage reclaimed. The defaultTableExpirationMs // property of the encapsulating dataset can be used to set a default // expirationTime on newly created models. - int64 expiration_time = 16; + int64 expiration_time = 16 [(google.api.field_behavior) = OPTIONAL]; // Output only. The geographic location where the model resides. This value // is inherited from the dataset. - string location = 13; + string location = 13 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Custom encryption configuration (e.g., Cloud KMS keys). This shows the + // encryption configuration of the model data while stored in BigQuery + // storage. + google.cloud.bigquery.v2.EncryptionConfiguration encryption_configuration = 17; // Output only. Type of the model resource. 
- ModelType model_type = 7; + ModelType model_type = 7 [(google.api.field_behavior) = OUTPUT_ONLY]; - // Output only. Information for all training runs in increasing order of - // start_time. - repeated TrainingRun training_runs = 9; + // Output only. Information for all training runs in increasing order of start_time. + repeated TrainingRun training_runs = 9 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Input feature columns that were used to train this model. - repeated StandardSqlField feature_columns = 10; + repeated StandardSqlField feature_columns = 10 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Label columns that were used to train this model. // The output of the model will have a "predicted_" prefix to these columns. - repeated StandardSqlField label_columns = 11; + repeated StandardSqlField label_columns = 11 [(google.api.field_behavior) = OUTPUT_ONLY]; } message GetModelRequest { - // Project ID of the requested model. - string project_id = 1; + // Required. Project ID of the requested model. + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - // Dataset ID of the requested model. - string dataset_id = 2; + // Required. Dataset ID of the requested model. + string dataset_id = 2 [(google.api.field_behavior) = REQUIRED]; - // Model ID of the requested model. - string model_id = 3; + // Required. Model ID of the requested model. + string model_id = 3 [(google.api.field_behavior) = REQUIRED]; } message PatchModelRequest { - // Project ID of the model to patch. - string project_id = 1; + // Required. Project ID of the model to patch. + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - // Dataset ID of the model to patch. - string dataset_id = 2; + // Required. Dataset ID of the model to patch. + string dataset_id = 2 [(google.api.field_behavior) = REQUIRED]; - // Model ID of the model to patch. - string model_id = 3; + // Required. Model ID of the model to patch. 
+ string model_id = 3 [(google.api.field_behavior) = REQUIRED]; - // Patched model. + // Required. Patched model. // Follows RFC5789 patch semantics. Missing fields are not updated. // To clear a field, explicitly set to default value. - Model model = 4; + Model model = 4 [(google.api.field_behavior) = REQUIRED]; } message DeleteModelRequest { - // Project ID of the model to delete. - string project_id = 1; + // Required. Project ID of the model to delete. + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - // Dataset ID of the model to delete. - string dataset_id = 2; + // Required. Dataset ID of the model to delete. + string dataset_id = 2 [(google.api.field_behavior) = REQUIRED]; - // Model ID of the model to delete. - string model_id = 3; + // Required. Model ID of the model to delete. + string model_id = 3 [(google.api.field_behavior) = REQUIRED]; } message ListModelsRequest { - // Project ID of the models to list. - string project_id = 1; + // Required. Project ID of the models to list. + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - // Dataset ID of the models to list. - string dataset_id = 2; + // Required. Dataset ID of the models to list. + string dataset_id = 2 [(google.api.field_behavior) = REQUIRED]; // The maximum number of results to return in a single response page. // Leverage the page tokens to iterate through the entire collection. 
diff --git a/bigquery/google/cloud/bigquery_v2/proto/model_pb2.py b/bigquery/google/cloud/bigquery_v2/proto/model_pb2.py index 45e6cefdf72c..3994660ec46d 100644 --- a/bigquery/google/cloud/bigquery_v2/proto/model_pb2.py +++ b/bigquery/google/cloud/bigquery_v2/proto/model_pb2.py @@ -15,6 +15,11 @@ _sym_db = _symbol_database.Default() +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.cloud.bigquery_v2.proto import ( + encryption_config_pb2 as google_dot_cloud_dot_bigquery__v2_dot_proto_dot_encryption__config__pb2, +) from google.cloud.bigquery_v2.proto import ( model_reference_pb2 as google_dot_cloud_dot_bigquery__v2_dot_proto_dot_model__reference__pb2, ) @@ -25,7 +30,6 @@ from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.api import client_pb2 as google_dot_api_dot_client__pb2 DESCRIPTOR = _descriptor.FileDescriptor( @@ -36,16 +40,18 @@ "\n\034com.google.cloud.bigquery.v2B\nModelProtoZ@google.golang.org/genproto/googleapis/cloud/bigquery/v2;bigquery" ), serialized_pb=_b( - '\n*google/cloud/bigquery_v2/proto/model.proto\x12\x18google.cloud.bigquery.v2\x1a\x34google/cloud/bigquery_v2/proto/model_reference.proto\x1a\x31google/cloud/bigquery_v2/proto/standard_sql.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto"\x8a\x34\n\x05Model\x12\x0c\n\x04\x65tag\x18\x01 \x01(\t\x12\x41\n\x0fmodel_reference\x18\x02 \x01(\x0b\x32(.google.cloud.bigquery.v2.ModelReference\x12\x15\n\rcreation_time\x18\x05 \x01(\x03\x12\x1a\n\x12last_modified_time\x18\x06 \x01(\x03\x12\x13\n\x0b\x64\x65scription\x18\x0c 
\x01(\t\x12\x15\n\rfriendly_name\x18\x0e \x01(\t\x12;\n\x06labels\x18\x0f \x03(\x0b\x32+.google.cloud.bigquery.v2.Model.LabelsEntry\x12\x17\n\x0f\x65xpiration_time\x18\x10 \x01(\x03\x12\x10\n\x08location\x18\r \x01(\t\x12=\n\nmodel_type\x18\x07 \x01(\x0e\x32).google.cloud.bigquery.v2.Model.ModelType\x12\x42\n\rtraining_runs\x18\t \x03(\x0b\x32+.google.cloud.bigquery.v2.Model.TrainingRun\x12\x43\n\x0f\x66\x65\x61ture_columns\x18\n \x03(\x0b\x32*.google.cloud.bigquery.v2.StandardSqlField\x12\x41\n\rlabel_columns\x18\x0b \x03(\x0b\x32*.google.cloud.bigquery.v2.StandardSqlField\x1aq\n\x0bKmeansEnums"b\n\x1aKmeansInitializationMethod\x12,\n(KMEANS_INITIALIZATION_METHOD_UNSPECIFIED\x10\x00\x12\n\n\x06RANDOM\x10\x01\x12\n\n\x06\x43USTOM\x10\x02\x1a\xb4\x02\n\x11RegressionMetrics\x12\x39\n\x13mean_absolute_error\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12\x38\n\x12mean_squared_error\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12<\n\x16mean_squared_log_error\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12;\n\x15median_absolute_error\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12/\n\tr_squared\x18\x05 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x1a\xef\x02\n\x1e\x41ggregateClassificationMetrics\x12/\n\tprecision\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12,\n\x06recall\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12.\n\x08\x61\x63\x63uracy\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12/\n\tthreshold\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12.\n\x08\x66\x31_score\x18\x05 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12.\n\x08log_loss\x18\x06 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12-\n\x07roc_auc\x18\x07 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x1a\x9f\x06\n\x1b\x42inaryClassificationMetrics\x12h\n aggregate_classification_metrics\x18\x01 \x01(\x0b\x32>.google.cloud.bigquery.v2.Model.AggregateClassificationMetrics\x12w\n\x1c\x62inary_confusion_matrix_list\x18\x02 
\x03(\x0b\x32Q.google.cloud.bigquery.v2.Model.BinaryClassificationMetrics.BinaryConfusionMatrix\x12\x16\n\x0epositive_label\x18\x03 \x01(\t\x12\x16\n\x0enegative_label\x18\x04 \x01(\t\x1a\xec\x03\n\x15\x42inaryConfusionMatrix\x12>\n\x18positive_class_threshold\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12\x33\n\x0etrue_positives\x18\x02 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12\x34\n\x0f\x66\x61lse_positives\x18\x03 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12\x33\n\x0etrue_negatives\x18\x04 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12\x34\n\x0f\x66\x61lse_negatives\x18\x05 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12/\n\tprecision\x18\x06 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12,\n\x06recall\x18\x07 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12.\n\x08\x66\x31_score\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12.\n\x08\x61\x63\x63uracy\x18\t \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x1a\x87\x05\n\x1fMultiClassClassificationMetrics\x12h\n aggregate_classification_metrics\x18\x01 \x01(\x0b\x32>.google.cloud.bigquery.v2.Model.AggregateClassificationMetrics\x12n\n\x15\x63onfusion_matrix_list\x18\x02 \x03(\x0b\x32O.google.cloud.bigquery.v2.Model.MultiClassClassificationMetrics.ConfusionMatrix\x1a\x89\x03\n\x0f\x43onfusionMatrix\x12:\n\x14\x63onfidence_threshold\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12\x61\n\x04rows\x18\x02 \x03(\x0b\x32S.google.cloud.bigquery.v2.Model.MultiClassClassificationMetrics.ConfusionMatrix.Row\x1aQ\n\x05\x45ntry\x12\x17\n\x0fpredicted_label\x18\x01 \x01(\t\x12/\n\nitem_count\x18\x02 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x1a\x83\x01\n\x03Row\x12\x14\n\x0c\x61\x63tual_label\x18\x01 \x01(\t\x12\x66\n\x07\x65ntries\x18\x02 \x03(\x0b\x32U.google.cloud.bigquery.v2.Model.MultiClassClassificationMetrics.ConfusionMatrix.Entry\x1a\xcb\x06\n\x11\x43lusteringMetrics\x12:\n\x14\x64\x61vies_bouldin_index\x18\x01 
\x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12;\n\x15mean_squared_distance\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12K\n\x08\x63lusters\x18\x03 \x03(\x0b\x32\x39.google.cloud.bigquery.v2.Model.ClusteringMetrics.Cluster\x1a\xef\x04\n\x07\x43luster\x12\x13\n\x0b\x63\x65ntroid_id\x18\x01 \x01(\x03\x12^\n\x0e\x66\x65\x61ture_values\x18\x02 \x03(\x0b\x32\x46.google.cloud.bigquery.v2.Model.ClusteringMetrics.Cluster.FeatureValue\x12*\n\x05\x63ount\x18\x03 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x1a\xc2\x03\n\x0c\x46\x65\x61tureValue\x12\x16\n\x0e\x66\x65\x61ture_column\x18\x01 \x01(\t\x12\x37\n\x0fnumerical_value\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.DoubleValueH\x00\x12t\n\x11\x63\x61tegorical_value\x18\x03 \x01(\x0b\x32W.google.cloud.bigquery.v2.Model.ClusteringMetrics.Cluster.FeatureValue.CategoricalValueH\x00\x1a\xe1\x01\n\x10\x43\x61tegoricalValue\x12~\n\x0f\x63\x61tegory_counts\x18\x01 \x03(\x0b\x32\x65.google.cloud.bigquery.v2.Model.ClusteringMetrics.Cluster.FeatureValue.CategoricalValue.CategoryCount\x1aM\n\rCategoryCount\x12\x10\n\x08\x63\x61tegory\x18\x01 \x01(\t\x12*\n\x05\x63ount\x18\x02 \x01(\x0b\x32\x1b.google.protobuf.Int64ValueB\x07\n\x05value\x1a\x95\x03\n\x11\x45valuationMetrics\x12O\n\x12regression_metrics\x18\x01 \x01(\x0b\x32\x31.google.cloud.bigquery.v2.Model.RegressionMetricsH\x00\x12\x64\n\x1d\x62inary_classification_metrics\x18\x02 \x01(\x0b\x32;.google.cloud.bigquery.v2.Model.BinaryClassificationMetricsH\x00\x12m\n"multi_class_classification_metrics\x18\x03 \x01(\x0b\x32?.google.cloud.bigquery.v2.Model.MultiClassClassificationMetricsH\x00\x12O\n\x12\x63lustering_metrics\x18\x04 \x01(\x0b\x32\x31.google.cloud.bigquery.v2.Model.ClusteringMetricsH\x00\x42\t\n\x07metrics\x1a\xab\x0f\n\x0bTrainingRun\x12U\n\x10training_options\x18\x01 \x01(\x0b\x32;.google.cloud.bigquery.v2.Model.TrainingRun.TrainingOptions\x12.\n\nstart_time\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12L\n\x07results\x18\x06 
\x03(\x0b\x32;.google.cloud.bigquery.v2.Model.TrainingRun.IterationResult\x12M\n\x12\x65valuation_metrics\x18\x07 \x01(\x0b\x32\x31.google.cloud.bigquery.v2.Model.EvaluationMetrics\x1a\x9d\t\n\x0fTrainingOptions\x12\x16\n\x0emax_iterations\x18\x01 \x01(\x03\x12;\n\tloss_type\x18\x02 \x01(\x0e\x32(.google.cloud.bigquery.v2.Model.LossType\x12\x12\n\nlearn_rate\x18\x03 \x01(\x01\x12\x37\n\x11l1_regularization\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12\x37\n\x11l2_regularization\x18\x05 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12;\n\x15min_relative_progress\x18\x06 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12.\n\nwarm_start\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12.\n\nearly_stop\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x1b\n\x13input_label_columns\x18\t \x03(\t\x12J\n\x11\x64\x61ta_split_method\x18\n \x01(\x0e\x32/.google.cloud.bigquery.v2.Model.DataSplitMethod\x12 \n\x18\x64\x61ta_split_eval_fraction\x18\x0b \x01(\x01\x12\x19\n\x11\x64\x61ta_split_column\x18\x0c \x01(\t\x12N\n\x13learn_rate_strategy\x18\r \x01(\x0e\x32\x31.google.cloud.bigquery.v2.Model.LearnRateStrategy\x12\x1a\n\x12initial_learn_rate\x18\x10 \x01(\x01\x12o\n\x13label_class_weights\x18\x11 \x03(\x0b\x32R.google.cloud.bigquery.v2.Model.TrainingRun.TrainingOptions.LabelClassWeightsEntry\x12\x43\n\rdistance_type\x18\x14 \x01(\x0e\x32,.google.cloud.bigquery.v2.Model.DistanceType\x12\x14\n\x0cnum_clusters\x18\x15 \x01(\x03\x12\x11\n\tmodel_uri\x18\x16 \x01(\t\x12S\n\x15optimization_strategy\x18\x17 \x01(\x0e\x32\x34.google.cloud.bigquery.v2.Model.OptimizationStrategy\x12l\n\x1ckmeans_initialization_method\x18! 
\x01(\x0e\x32\x46.google.cloud.bigquery.v2.Model.KmeansEnums.KmeansInitializationMethod\x12$\n\x1ckmeans_initialization_column\x18" \x01(\t\x1a\x38\n\x16LabelClassWeightsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x01:\x02\x38\x01\x1a\xd7\x03\n\x0fIterationResult\x12*\n\x05index\x18\x01 \x01(\x0b\x32\x1b.google.protobuf.Int32Value\x12\x30\n\x0b\x64uration_ms\x18\x04 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12\x33\n\rtraining_loss\x18\x05 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12/\n\teval_loss\x18\x06 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12\x12\n\nlearn_rate\x18\x07 \x01(\x01\x12^\n\rcluster_infos\x18\x08 \x03(\x0b\x32G.google.cloud.bigquery.v2.Model.TrainingRun.IterationResult.ClusterInfo\x1a\x8b\x01\n\x0b\x43lusterInfo\x12\x13\n\x0b\x63\x65ntroid_id\x18\x01 \x01(\x03\x12\x34\n\x0e\x63luster_radius\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12\x31\n\x0c\x63luster_size\x18\x03 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01"s\n\tModelType\x12\x1a\n\x16MODEL_TYPE_UNSPECIFIED\x10\x00\x12\x15\n\x11LINEAR_REGRESSION\x10\x01\x12\x17\n\x13LOGISTIC_REGRESSION\x10\x02\x12\n\n\x06KMEANS\x10\x03\x12\x0e\n\nTENSORFLOW\x10\x06"O\n\x08LossType\x12\x19\n\x15LOSS_TYPE_UNSPECIFIED\x10\x00\x12\x15\n\x11MEAN_SQUARED_LOSS\x10\x01\x12\x11\n\rMEAN_LOG_LOSS\x10\x02"H\n\x0c\x44istanceType\x12\x1d\n\x19\x44ISTANCE_TYPE_UNSPECIFIED\x10\x00\x12\r\n\tEUCLIDEAN\x10\x01\x12\n\n\x06\x43OSINE\x10\x02"z\n\x0f\x44\x61taSplitMethod\x12!\n\x1d\x44\x41TA_SPLIT_METHOD_UNSPECIFIED\x10\x00\x12\n\n\x06RANDOM\x10\x01\x12\n\n\x06\x43USTOM\x10\x02\x12\x0e\n\nSEQUENTIAL\x10\x03\x12\x0c\n\x08NO_SPLIT\x10\x04\x12\x0e\n\nAUTO_SPLIT\x10\x05"W\n\x11LearnRateStrategy\x12#\n\x1fLEARN_RATE_STRATEGY_UNSPECIFIED\x10\x00\x12\x0f\n\x0bLINE_SEARCH\x10\x01\x12\x0c\n\x08\x43ONSTANT\x10\x02"n\n\x14OptimizationStrategy\x12%\n!OPTIMIZATION_STRATEGY_UNSPECIFIED\x10\x00\x12\x1a\n\x16\x42\x41TCH_GRADIENT_DESCENT\x10\x01\x12\x13\n\x0fNORMAL_EQUATION\x10\x02"K\n\x0fGetModelRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x12\n\ndataset_id\x18\x02 \x01(\t\x12\x10\n\x08model_id\x18\x03 \x01(\t"}\n\x11PatchModelRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x12\n\ndataset_id\x18\x02 \x01(\t\x12\x10\n\x08model_id\x18\x03 \x01(\t\x12.\n\x05model\x18\x04 \x01(\x0b\x32\x1f.google.cloud.bigquery.v2.Model"N\n\x12\x44\x65leteModelRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x12\n\ndataset_id\x18\x02 \x01(\t\x12\x10\n\x08model_id\x18\x03 \x01(\t"\x82\x01\n\x11ListModelsRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x12\n\ndataset_id\x18\x02 \x01(\t\x12\x31\n\x0bmax_results\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.UInt32Value\x12\x12\n\npage_token\x18\x04 \x01(\t"^\n\x12ListModelsResponse\x12/\n\x06models\x18\x01 \x03(\x0b\x32\x1f.google.cloud.bigquery.v2.Model\x12\x17\n\x0fnext_page_token\x18\x02 
\x01(\t2\xeb\x04\n\x0cModelService\x12X\n\x08GetModel\x12).google.cloud.bigquery.v2.GetModelRequest\x1a\x1f.google.cloud.bigquery.v2.Model"\x00\x12i\n\nListModels\x12+.google.cloud.bigquery.v2.ListModelsRequest\x1a,.google.cloud.bigquery.v2.ListModelsResponse"\x00\x12\\\n\nPatchModel\x12+.google.cloud.bigquery.v2.PatchModelRequest\x1a\x1f.google.cloud.bigquery.v2.Model"\x00\x12U\n\x0b\x44\x65leteModel\x12,.google.cloud.bigquery.v2.DeleteModelRequest\x1a\x16.google.protobuf.Empty"\x00\x1a\xe0\x01\xca\x41\x17\x62igquery.googleapis.com\xd2\x41\xc2\x01https://www.googleapis.com/auth/bigquery,https://www.googleapis.com/auth/bigquery.readonly,https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-onlyBl\n\x1c\x63om.google.cloud.bigquery.v2B\nModelProtoZ@google.golang.org/genproto/googleapis/cloud/bigquery/v2;bigqueryb\x06proto3' + '\n*google/cloud/bigquery_v2/proto/model.proto\x12\x18google.cloud.bigquery.v2\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x36google/cloud/bigquery_v2/proto/encryption_config.proto\x1a\x34google/cloud/bigquery_v2/proto/model_reference.proto\x1a\x31google/cloud/bigquery_v2/proto/standard_sql.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x1cgoogle/api/annotations.proto"\x9b\x35\n\x05Model\x12\x11\n\x04\x65tag\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12\x46\n\x0fmodel_reference\x18\x02 \x01(\x0b\x32(.google.cloud.bigquery.v2.ModelReferenceB\x03\xe0\x41\x02\x12\x1a\n\rcreation_time\x18\x05 \x01(\x03\x42\x03\xe0\x41\x03\x12\x1f\n\x12last_modified_time\x18\x06 \x01(\x03\x42\x03\xe0\x41\x03\x12\x18\n\x0b\x64\x65scription\x18\x0c \x01(\tB\x03\xe0\x41\x01\x12\x1a\n\rfriendly_name\x18\x0e \x01(\tB\x03\xe0\x41\x01\x12;\n\x06labels\x18\x0f \x03(\x0b\x32+.google.cloud.bigquery.v2.Model.LabelsEntry\x12\x1c\n\x0f\x65xpiration_time\x18\x10 \x01(\x03\x42\x03\xe0\x41\x01\x12\x15\n\x08location\x18\r 
\x01(\tB\x03\xe0\x41\x03\x12S\n\x18\x65ncryption_configuration\x18\x11 \x01(\x0b\x32\x31.google.cloud.bigquery.v2.EncryptionConfiguration\x12\x42\n\nmodel_type\x18\x07 \x01(\x0e\x32).google.cloud.bigquery.v2.Model.ModelTypeB\x03\xe0\x41\x03\x12G\n\rtraining_runs\x18\t \x03(\x0b\x32+.google.cloud.bigquery.v2.Model.TrainingRunB\x03\xe0\x41\x03\x12H\n\x0f\x66\x65\x61ture_columns\x18\n \x03(\x0b\x32*.google.cloud.bigquery.v2.StandardSqlFieldB\x03\xe0\x41\x03\x12\x46\n\rlabel_columns\x18\x0b \x03(\x0b\x32*.google.cloud.bigquery.v2.StandardSqlFieldB\x03\xe0\x41\x03\x1aq\n\x0bKmeansEnums"b\n\x1aKmeansInitializationMethod\x12,\n(KMEANS_INITIALIZATION_METHOD_UNSPECIFIED\x10\x00\x12\n\n\x06RANDOM\x10\x01\x12\n\n\x06\x43USTOM\x10\x02\x1a\xb4\x02\n\x11RegressionMetrics\x12\x39\n\x13mean_absolute_error\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12\x38\n\x12mean_squared_error\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12<\n\x16mean_squared_log_error\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12;\n\x15median_absolute_error\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12/\n\tr_squared\x18\x05 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x1a\xef\x02\n\x1e\x41ggregateClassificationMetrics\x12/\n\tprecision\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12,\n\x06recall\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12.\n\x08\x61\x63\x63uracy\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12/\n\tthreshold\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12.\n\x08\x66\x31_score\x18\x05 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12.\n\x08log_loss\x18\x06 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12-\n\x07roc_auc\x18\x07 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x1a\x9f\x06\n\x1b\x42inaryClassificationMetrics\x12h\n aggregate_classification_metrics\x18\x01 \x01(\x0b\x32>.google.cloud.bigquery.v2.Model.AggregateClassificationMetrics\x12w\n\x1c\x62inary_confusion_matrix_list\x18\x02 
\x03(\x0b\x32Q.google.cloud.bigquery.v2.Model.BinaryClassificationMetrics.BinaryConfusionMatrix\x12\x16\n\x0epositive_label\x18\x03 \x01(\t\x12\x16\n\x0enegative_label\x18\x04 \x01(\t\x1a\xec\x03\n\x15\x42inaryConfusionMatrix\x12>\n\x18positive_class_threshold\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12\x33\n\x0etrue_positives\x18\x02 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12\x34\n\x0f\x66\x61lse_positives\x18\x03 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12\x33\n\x0etrue_negatives\x18\x04 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12\x34\n\x0f\x66\x61lse_negatives\x18\x05 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12/\n\tprecision\x18\x06 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12,\n\x06recall\x18\x07 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12.\n\x08\x66\x31_score\x18\x08 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12.\n\x08\x61\x63\x63uracy\x18\t \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x1a\x87\x05\n\x1fMultiClassClassificationMetrics\x12h\n aggregate_classification_metrics\x18\x01 \x01(\x0b\x32>.google.cloud.bigquery.v2.Model.AggregateClassificationMetrics\x12n\n\x15\x63onfusion_matrix_list\x18\x02 \x03(\x0b\x32O.google.cloud.bigquery.v2.Model.MultiClassClassificationMetrics.ConfusionMatrix\x1a\x89\x03\n\x0f\x43onfusionMatrix\x12:\n\x14\x63onfidence_threshold\x18\x01 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12\x61\n\x04rows\x18\x02 \x03(\x0b\x32S.google.cloud.bigquery.v2.Model.MultiClassClassificationMetrics.ConfusionMatrix.Row\x1aQ\n\x05\x45ntry\x12\x17\n\x0fpredicted_label\x18\x01 \x01(\t\x12/\n\nitem_count\x18\x02 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x1a\x83\x01\n\x03Row\x12\x14\n\x0c\x61\x63tual_label\x18\x01 \x01(\t\x12\x66\n\x07\x65ntries\x18\x02 \x03(\x0b\x32U.google.cloud.bigquery.v2.Model.MultiClassClassificationMetrics.ConfusionMatrix.Entry\x1a\xcb\x06\n\x11\x43lusteringMetrics\x12:\n\x14\x64\x61vies_bouldin_index\x18\x01 
\x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12;\n\x15mean_squared_distance\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12K\n\x08\x63lusters\x18\x03 \x03(\x0b\x32\x39.google.cloud.bigquery.v2.Model.ClusteringMetrics.Cluster\x1a\xef\x04\n\x07\x43luster\x12\x13\n\x0b\x63\x65ntroid_id\x18\x01 \x01(\x03\x12^\n\x0e\x66\x65\x61ture_values\x18\x02 \x03(\x0b\x32\x46.google.cloud.bigquery.v2.Model.ClusteringMetrics.Cluster.FeatureValue\x12*\n\x05\x63ount\x18\x03 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x1a\xc2\x03\n\x0c\x46\x65\x61tureValue\x12\x16\n\x0e\x66\x65\x61ture_column\x18\x01 \x01(\t\x12\x37\n\x0fnumerical_value\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.DoubleValueH\x00\x12t\n\x11\x63\x61tegorical_value\x18\x03 \x01(\x0b\x32W.google.cloud.bigquery.v2.Model.ClusteringMetrics.Cluster.FeatureValue.CategoricalValueH\x00\x1a\xe1\x01\n\x10\x43\x61tegoricalValue\x12~\n\x0f\x63\x61tegory_counts\x18\x01 \x03(\x0b\x32\x65.google.cloud.bigquery.v2.Model.ClusteringMetrics.Cluster.FeatureValue.CategoricalValue.CategoryCount\x1aM\n\rCategoryCount\x12\x10\n\x08\x63\x61tegory\x18\x01 \x01(\t\x12*\n\x05\x63ount\x18\x02 \x01(\x0b\x32\x1b.google.protobuf.Int64ValueB\x07\n\x05value\x1a\x95\x03\n\x11\x45valuationMetrics\x12O\n\x12regression_metrics\x18\x01 \x01(\x0b\x32\x31.google.cloud.bigquery.v2.Model.RegressionMetricsH\x00\x12\x64\n\x1d\x62inary_classification_metrics\x18\x02 \x01(\x0b\x32;.google.cloud.bigquery.v2.Model.BinaryClassificationMetricsH\x00\x12m\n"multi_class_classification_metrics\x18\x03 \x01(\x0b\x32?.google.cloud.bigquery.v2.Model.MultiClassClassificationMetricsH\x00\x12O\n\x12\x63lustering_metrics\x18\x04 \x01(\x0b\x32\x31.google.cloud.bigquery.v2.Model.ClusteringMetricsH\x00\x42\t\n\x07metrics\x1a\xab\x0f\n\x0bTrainingRun\x12U\n\x10training_options\x18\x01 \x01(\x0b\x32;.google.cloud.bigquery.v2.Model.TrainingRun.TrainingOptions\x12.\n\nstart_time\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12L\n\x07results\x18\x06 
\x03(\x0b\x32;.google.cloud.bigquery.v2.Model.TrainingRun.IterationResult\x12M\n\x12\x65valuation_metrics\x18\x07 \x01(\x0b\x32\x31.google.cloud.bigquery.v2.Model.EvaluationMetrics\x1a\x9d\t\n\x0fTrainingOptions\x12\x16\n\x0emax_iterations\x18\x01 \x01(\x03\x12;\n\tloss_type\x18\x02 \x01(\x0e\x32(.google.cloud.bigquery.v2.Model.LossType\x12\x12\n\nlearn_rate\x18\x03 \x01(\x01\x12\x37\n\x11l1_regularization\x18\x04 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12\x37\n\x11l2_regularization\x18\x05 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12;\n\x15min_relative_progress\x18\x06 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12.\n\nwarm_start\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12.\n\nearly_stop\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x1b\n\x13input_label_columns\x18\t \x03(\t\x12J\n\x11\x64\x61ta_split_method\x18\n \x01(\x0e\x32/.google.cloud.bigquery.v2.Model.DataSplitMethod\x12 \n\x18\x64\x61ta_split_eval_fraction\x18\x0b \x01(\x01\x12\x19\n\x11\x64\x61ta_split_column\x18\x0c \x01(\t\x12N\n\x13learn_rate_strategy\x18\r \x01(\x0e\x32\x31.google.cloud.bigquery.v2.Model.LearnRateStrategy\x12\x1a\n\x12initial_learn_rate\x18\x10 \x01(\x01\x12o\n\x13label_class_weights\x18\x11 \x03(\x0b\x32R.google.cloud.bigquery.v2.Model.TrainingRun.TrainingOptions.LabelClassWeightsEntry\x12\x43\n\rdistance_type\x18\x14 \x01(\x0e\x32,.google.cloud.bigquery.v2.Model.DistanceType\x12\x14\n\x0cnum_clusters\x18\x15 \x01(\x03\x12\x11\n\tmodel_uri\x18\x16 \x01(\t\x12S\n\x15optimization_strategy\x18\x17 \x01(\x0e\x32\x34.google.cloud.bigquery.v2.Model.OptimizationStrategy\x12l\n\x1ckmeans_initialization_method\x18! 
\x01(\x0e\x32\x46.google.cloud.bigquery.v2.Model.KmeansEnums.KmeansInitializationMethod\x12$\n\x1ckmeans_initialization_column\x18" \x01(\t\x1a\x38\n\x16LabelClassWeightsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x01:\x02\x38\x01\x1a\xd7\x03\n\x0fIterationResult\x12*\n\x05index\x18\x01 \x01(\x0b\x32\x1b.google.protobuf.Int32Value\x12\x30\n\x0b\x64uration_ms\x18\x04 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12\x33\n\rtraining_loss\x18\x05 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12/\n\teval_loss\x18\x06 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12\x12\n\nlearn_rate\x18\x07 \x01(\x01\x12^\n\rcluster_infos\x18\x08 \x03(\x0b\x32G.google.cloud.bigquery.v2.Model.TrainingRun.IterationResult.ClusterInfo\x1a\x8b\x01\n\x0b\x43lusterInfo\x12\x13\n\x0b\x63\x65ntroid_id\x18\x01 \x01(\x03\x12\x34\n\x0e\x63luster_radius\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12\x31\n\x0c\x63luster_size\x18\x03 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01"s\n\tModelType\x12\x1a\n\x16MODEL_TYPE_UNSPECIFIED\x10\x00\x12\x15\n\x11LINEAR_REGRESSION\x10\x01\x12\x17\n\x13LOGISTIC_REGRESSION\x10\x02\x12\n\n\x06KMEANS\x10\x03\x12\x0e\n\nTENSORFLOW\x10\x06"O\n\x08LossType\x12\x19\n\x15LOSS_TYPE_UNSPECIFIED\x10\x00\x12\x15\n\x11MEAN_SQUARED_LOSS\x10\x01\x12\x11\n\rMEAN_LOG_LOSS\x10\x02"H\n\x0c\x44istanceType\x12\x1d\n\x19\x44ISTANCE_TYPE_UNSPECIFIED\x10\x00\x12\r\n\tEUCLIDEAN\x10\x01\x12\n\n\x06\x43OSINE\x10\x02"z\n\x0f\x44\x61taSplitMethod\x12!\n\x1d\x44\x41TA_SPLIT_METHOD_UNSPECIFIED\x10\x00\x12\n\n\x06RANDOM\x10\x01\x12\n\n\x06\x43USTOM\x10\x02\x12\x0e\n\nSEQUENTIAL\x10\x03\x12\x0c\n\x08NO_SPLIT\x10\x04\x12\x0e\n\nAUTO_SPLIT\x10\x05"W\n\x11LearnRateStrategy\x12#\n\x1fLEARN_RATE_STRATEGY_UNSPECIFIED\x10\x00\x12\x0f\n\x0bLINE_SEARCH\x10\x01\x12\x0c\n\x08\x43ONSTANT\x10\x02"n\n\x14OptimizationStrategy\x12%\n!OPTIMIZATION_STRATEGY_UNSPECIFIED\x10\x00\x12\x1a\n\x16\x42\x41TCH_GRADIENT_DESCENT\x10\x01\x12\x13\n\x0fNORMAL_EQUATION\x10\x02"Z\n\x0fGetModelRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x17\n\ndataset_id\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x15\n\x08model_id\x18\x03 \x01(\tB\x03\xe0\x41\x02"\x91\x01\n\x11PatchModelRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x17\n\ndataset_id\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x15\n\x08model_id\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x33\n\x05model\x18\x04 \x01(\x0b\x32\x1f.google.cloud.bigquery.v2.ModelB\x03\xe0\x41\x02"]\n\x12\x44\x65leteModelRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x17\n\ndataset_id\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x15\n\x08model_id\x18\x03 \x01(\tB\x03\xe0\x41\x02"\x8c\x01\n\x11ListModelsRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x17\n\ndataset_id\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x31\n\x0bmax_results\x18\x03 \x01(\x0b\x32\x1c.google.protobuf.UInt32Value\x12\x12\n\npage_token\x18\x04 \x01(\t"^\n\x12ListModelsResponse\x12/\n\x06models\x18\x01 
\x03(\x0b\x32\x1f.google.cloud.bigquery.v2.Model\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t2\xfa\x05\n\x0cModelService\x12y\n\x08GetModel\x12).google.cloud.bigquery.v2.GetModelRequest\x1a\x1f.google.cloud.bigquery.v2.Model"!\xda\x41\x1eproject_id,dataset_id,model_id\x12\x8d\x01\n\nListModels\x12+.google.cloud.bigquery.v2.ListModelsRequest\x1a,.google.cloud.bigquery.v2.ListModelsResponse"$\xda\x41!project_id,dataset_id,max_results\x12\x83\x01\n\nPatchModel\x12+.google.cloud.bigquery.v2.PatchModelRequest\x1a\x1f.google.cloud.bigquery.v2.Model"\'\xda\x41$project_id,dataset_id,model_id,model\x12v\n\x0b\x44\x65leteModel\x12,.google.cloud.bigquery.v2.DeleteModelRequest\x1a\x16.google.protobuf.Empty"!\xda\x41\x1eproject_id,dataset_id,model_id\x1a\xe0\x01\xca\x41\x17\x62igquery.googleapis.com\xd2\x41\xc2\x01https://www.googleapis.com/auth/bigquery,https://www.googleapis.com/auth/bigquery.readonly,https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-onlyBl\n\x1c\x63om.google.cloud.bigquery.v2B\nModelProtoZ@google.golang.org/genproto/googleapis/cloud/bigquery/v2;bigqueryb\x06proto3' ), dependencies=[ + google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, + google_dot_cloud_dot_bigquery__v2_dot_proto_dot_encryption__config__pb2.DESCRIPTOR, google_dot_cloud_dot_bigquery__v2_dot_proto_dot_model__reference__pb2.DESCRIPTOR, google_dot_cloud_dot_bigquery__v2_dot_proto_dot_standard__sql__pb2.DESCRIPTOR, google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, google_dot_protobuf_dot_wrappers__pb2.DESCRIPTOR, google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_api_dot_client__pb2.DESCRIPTOR, ], ) @@ -72,8 +78,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=898, - serialized_end=996, + serialized_start=1132, + serialized_end=1230, ) _sym_db.RegisterEnumDescriptor(_MODEL_KMEANSENUMS_KMEANSINITIALIZATIONMETHOD) @@ -113,8 
+119,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=6398, - serialized_end=6513, + serialized_start=6632, + serialized_end=6747, ) _sym_db.RegisterEnumDescriptor(_MODEL_MODELTYPE) @@ -144,8 +150,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=6515, - serialized_end=6594, + serialized_start=6749, + serialized_end=6828, ) _sym_db.RegisterEnumDescriptor(_MODEL_LOSSTYPE) @@ -171,8 +177,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=6596, - serialized_end=6668, + serialized_start=6830, + serialized_end=6902, ) _sym_db.RegisterEnumDescriptor(_MODEL_DISTANCETYPE) @@ -207,8 +213,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=6670, - serialized_end=6792, + serialized_start=6904, + serialized_end=7026, ) _sym_db.RegisterEnumDescriptor(_MODEL_DATASPLITMETHOD) @@ -234,8 +240,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=6794, - serialized_end=6881, + serialized_start=7028, + serialized_end=7115, ) _sym_db.RegisterEnumDescriptor(_MODEL_LEARNRATESTRATEGY) @@ -269,8 +275,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=6883, - serialized_end=6993, + serialized_start=7117, + serialized_end=7227, ) _sym_db.RegisterEnumDescriptor(_MODEL_OPTIMIZATIONSTRATEGY) @@ -284,14 +290,14 @@ fields=[], extensions=[], nested_types=[], - enum_types=[_MODEL_KMEANSENUMS_KMEANSINITIALIZATIONMETHOD], + enum_types=[_MODEL_KMEANSENUMS_KMEANSINITIALIZATIONMETHOD,], serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=883, - serialized_end=996, + serialized_start=1117, + serialized_end=1230, ) _MODEL_REGRESSIONMETRICS = _descriptor.Descriptor( @@ -400,8 +406,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=999, - serialized_end=1307, + serialized_start=1233, + serialized_end=1541, ) _MODEL_AGGREGATECLASSIFICATIONMETRICS = _descriptor.Descriptor( @@ -546,8 +552,8 
@@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1310, - serialized_end=1677, + serialized_start=1544, + serialized_end=1911, ) _MODEL_BINARYCLASSIFICATIONMETRICS_BINARYCONFUSIONMATRIX = _descriptor.Descriptor( @@ -728,8 +734,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1987, - serialized_end=2479, + serialized_start=2221, + serialized_end=2713, ) _MODEL_BINARYCLASSIFICATIONMETRICS = _descriptor.Descriptor( @@ -813,15 +819,15 @@ ), ], extensions=[], - nested_types=[_MODEL_BINARYCLASSIFICATIONMETRICS_BINARYCONFUSIONMATRIX], + nested_types=[_MODEL_BINARYCLASSIFICATIONMETRICS_BINARYCONFUSIONMATRIX,], enum_types=[], serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1680, - serialized_end=2479, + serialized_start=1914, + serialized_end=2713, ) _MODEL_MULTICLASSCLASSIFICATIONMETRICS_CONFUSIONMATRIX_ENTRY = _descriptor.Descriptor( @@ -876,8 +882,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2914, - serialized_end=2995, + serialized_start=3148, + serialized_end=3229, ) _MODEL_MULTICLASSCLASSIFICATIONMETRICS_CONFUSIONMATRIX_ROW = _descriptor.Descriptor( @@ -932,8 +938,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2998, - serialized_end=3129, + serialized_start=3232, + serialized_end=3363, ) _MODEL_MULTICLASSCLASSIFICATIONMETRICS_CONFUSIONMATRIX = _descriptor.Descriptor( @@ -991,8 +997,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2736, - serialized_end=3129, + serialized_start=2970, + serialized_end=3363, ) _MODEL_MULTICLASSCLASSIFICATIONMETRICS = _descriptor.Descriptor( @@ -1040,15 +1046,15 @@ ), ], extensions=[], - nested_types=[_MODEL_MULTICLASSCLASSIFICATIONMETRICS_CONFUSIONMATRIX], + nested_types=[_MODEL_MULTICLASSCLASSIFICATIONMETRICS_CONFUSIONMATRIX,], enum_types=[], serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - 
serialized_start=2482, - serialized_end=3129, + serialized_start=2716, + serialized_end=3363, ) _MODEL_CLUSTERINGMETRICS_CLUSTER_FEATUREVALUE_CATEGORICALVALUE_CATEGORYCOUNT = _descriptor.Descriptor( @@ -1103,8 +1109,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3889, - serialized_end=3966, + serialized_start=4123, + serialized_end=4200, ) _MODEL_CLUSTERINGMETRICS_CLUSTER_FEATUREVALUE_CATEGORICALVALUE = _descriptor.Descriptor( @@ -1131,11 +1137,11 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[ - _MODEL_CLUSTERINGMETRICS_CLUSTER_FEATUREVALUE_CATEGORICALVALUE_CATEGORYCOUNT + _MODEL_CLUSTERINGMETRICS_CLUSTER_FEATUREVALUE_CATEGORICALVALUE_CATEGORYCOUNT, ], enum_types=[], serialized_options=None, @@ -1143,8 +1149,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3741, - serialized_end=3966, + serialized_start=3975, + serialized_end=4200, ) _MODEL_CLUSTERINGMETRICS_CLUSTER_FEATUREVALUE = _descriptor.Descriptor( @@ -1210,7 +1216,7 @@ ), ], extensions=[], - nested_types=[_MODEL_CLUSTERINGMETRICS_CLUSTER_FEATUREVALUE_CATEGORICALVALUE], + nested_types=[_MODEL_CLUSTERINGMETRICS_CLUSTER_FEATUREVALUE_CATEGORICALVALUE,], enum_types=[], serialized_options=None, is_extendable=False, @@ -1223,10 +1229,10 @@ index=0, containing_type=None, fields=[], - ) + ), ], - serialized_start=3525, - serialized_end=3975, + serialized_start=3759, + serialized_end=4209, ) _MODEL_CLUSTERINGMETRICS_CLUSTER = _descriptor.Descriptor( @@ -1292,15 +1298,15 @@ ), ], extensions=[], - nested_types=[_MODEL_CLUSTERINGMETRICS_CLUSTER_FEATUREVALUE], + nested_types=[_MODEL_CLUSTERINGMETRICS_CLUSTER_FEATUREVALUE,], enum_types=[], serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3352, - serialized_end=3975, + serialized_start=3586, + serialized_end=4209, ) _MODEL_CLUSTERINGMETRICS = _descriptor.Descriptor( @@ -1366,15 +1372,15 @@ ), ], 
extensions=[], - nested_types=[_MODEL_CLUSTERINGMETRICS_CLUSTER], + nested_types=[_MODEL_CLUSTERINGMETRICS_CLUSTER,], enum_types=[], serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3132, - serialized_end=3975, + serialized_start=3366, + serialized_end=4209, ) _MODEL_EVALUATIONMETRICS = _descriptor.Descriptor( @@ -1471,10 +1477,10 @@ index=0, containing_type=None, fields=[], - ) + ), ], - serialized_start=3978, - serialized_end=4383, + serialized_start=4212, + serialized_end=4617, ) _MODEL_TRAININGRUN_TRAININGOPTIONS_LABELCLASSWEIGHTSENTRY = _descriptor.Descriptor( @@ -1529,8 +1535,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5819, - serialized_end=5875, + serialized_start=6053, + serialized_end=6109, ) _MODEL_TRAININGRUN_TRAININGOPTIONS = _descriptor.Descriptor( @@ -1920,15 +1926,15 @@ ), ], extensions=[], - nested_types=[_MODEL_TRAININGRUN_TRAININGOPTIONS_LABELCLASSWEIGHTSENTRY], + nested_types=[_MODEL_TRAININGRUN_TRAININGOPTIONS_LABELCLASSWEIGHTSENTRY,], enum_types=[], serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4694, - serialized_end=5875, + serialized_start=4928, + serialized_end=6109, ) _MODEL_TRAININGRUN_ITERATIONRESULT_CLUSTERINFO = _descriptor.Descriptor( @@ -2001,8 +2007,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=6210, - serialized_end=6349, + serialized_start=6444, + serialized_end=6583, ) _MODEL_TRAININGRUN_ITERATIONRESULT = _descriptor.Descriptor( @@ -2122,15 +2128,15 @@ ), ], extensions=[], - nested_types=[_MODEL_TRAININGRUN_ITERATIONRESULT_CLUSTERINFO], + nested_types=[_MODEL_TRAININGRUN_ITERATIONRESULT_CLUSTERINFO,], enum_types=[], serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5878, - serialized_end=6349, + serialized_start=6112, + serialized_end=6583, ) _MODEL_TRAININGRUN = 
_descriptor.Descriptor( @@ -2224,8 +2230,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4386, - serialized_end=6349, + serialized_start=4620, + serialized_end=6583, ) _MODEL_LABELSENTRY = _descriptor.Descriptor( @@ -2280,8 +2286,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=6351, - serialized_end=6396, + serialized_start=6585, + serialized_end=6630, ) _MODEL = _descriptor.Descriptor( @@ -2306,7 +2312,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2324,7 +2330,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2342,7 +2348,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2360,7 +2366,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2378,7 +2384,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2396,7 +2402,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2432,7 +2438,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2450,13 +2456,31 @@ containing_type=None, is_extension=False, extension_scope=None, + serialized_options=_b("\340A\003"), + file=DESCRIPTOR, + ), + 
_descriptor.FieldDescriptor( + name="encryption_configuration", + full_name="google.cloud.bigquery.v2.Model.encryption_configuration", + index=9, + number=17, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( name="model_type", full_name="google.cloud.bigquery.v2.Model.model_type", - index=9, + index=10, number=7, type=14, cpp_type=8, @@ -2468,13 +2492,13 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( name="training_runs", full_name="google.cloud.bigquery.v2.Model.training_runs", - index=10, + index=11, number=9, type=11, cpp_type=10, @@ -2486,13 +2510,13 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( name="feature_columns", full_name="google.cloud.bigquery.v2.Model.feature_columns", - index=11, + index=12, number=10, type=11, cpp_type=10, @@ -2504,13 +2528,13 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( name="label_columns", full_name="google.cloud.bigquery.v2.Model.label_columns", - index=12, + index=13, number=11, type=11, cpp_type=10, @@ -2522,7 +2546,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -2551,8 +2575,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=327, - serialized_end=6993, + serialized_start=416, + serialized_end=7227, ) @@ -2578,7 +2602,7 @@ containing_type=None, is_extension=False, 
extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2596,7 +2620,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2614,7 +2638,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -2626,8 +2650,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=6995, - serialized_end=7070, + serialized_start=7229, + serialized_end=7319, ) @@ -2653,7 +2677,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2671,7 +2695,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2689,7 +2713,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2707,7 +2731,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -2719,8 +2743,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=7072, - serialized_end=7197, + serialized_start=7322, + serialized_end=7467, ) @@ -2746,7 +2770,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2764,7 +2788,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), 
_descriptor.FieldDescriptor( @@ -2782,7 +2806,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -2794,8 +2818,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=7199, - serialized_end=7277, + serialized_start=7469, + serialized_end=7562, ) @@ -2821,7 +2845,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2839,7 +2863,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2887,8 +2911,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=7280, - serialized_end=7410, + serialized_start=7565, + serialized_end=7705, ) @@ -2944,8 +2968,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=7412, - serialized_end=7506, + serialized_start=7707, + serialized_end=7801, ) _MODEL_KMEANSENUMS.containing_type = _MODEL @@ -3231,6 +3255,11 @@ google_dot_cloud_dot_bigquery__v2_dot_proto_dot_model__reference__pb2._MODELREFERENCE ) _MODEL.fields_by_name["labels"].message_type = _MODEL_LABELSENTRY +_MODEL.fields_by_name[ + "encryption_configuration" +].message_type = ( + google_dot_cloud_dot_bigquery__v2_dot_proto_dot_encryption__config__pb2._ENCRYPTIONCONFIGURATION +) _MODEL.fields_by_name["model_type"].enum_type = _MODEL_MODELTYPE _MODEL.fields_by_name["training_runs"].message_type = _MODEL_TRAININGRUN _MODEL.fields_by_name[ @@ -3520,7 +3549,7 @@ category_counts: Counts of all categories for the categorical feature. 
If there are more than ten categories, we return top ten (by count) and - return one more CategoryCount with category ‘\ *OTHER*\ ’ and + return one more CategoryCount with category "*OTHER*" and count as aggregate counts of remaining categories. """, # @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.Model.ClusteringMetrics.Cluster.FeatureValue.CategoricalValue) @@ -3789,19 +3818,19 @@ Output only. The time when this model was last modified, in millisecs since the epoch. description: - [Optional] A user-friendly description of this model. + Optional. A user-friendly description of this model. friendly_name: - [Optional] A descriptive name for this model. + Optional. A descriptive name for this model. labels: - [Optional] The labels associated with this model. You can use - these to organize and group your models. Label keys and values - can be no longer than 63 characters, can only contain - lowercase letters, numeric characters, underscores and dashes. + The labels associated with this model. You can use these to + organize and group your models. Label keys and values can be + no longer than 63 characters, can only contain lowercase + letters, numeric characters, underscores and dashes. International characters are allowed. Label values are optional. Label keys must start with a letter and each label in the list must have a different key. expiration_time: - [Optional] The time when this model expires, in milliseconds + Optional. The time when this model expires, in milliseconds since the epoch. If not present, the model will persist indefinitely. Expired models will be deleted and their storage reclaimed. The defaultTableExpirationMs property of the @@ -3810,6 +3839,10 @@ location: Output only. The geographic location where the model resides. This value is inherited from the dataset. + encryption_configuration: + Custom encryption configuration (e.g., Cloud KMS keys). 
This + shows the encryption configuration of the model data while + stored in BigQuery storage. model_type: Output only. Type of the model resource. training_runs: @@ -3861,11 +3894,11 @@ Attributes: project_id: - Project ID of the requested model. + Required. Project ID of the requested model. dataset_id: - Dataset ID of the requested model. + Required. Dataset ID of the requested model. model_id: - Model ID of the requested model. + Required. Model ID of the requested model. """, # @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.GetModelRequest) ), @@ -3882,15 +3915,15 @@ Attributes: project_id: - Project ID of the model to patch. + Required. Project ID of the model to patch. dataset_id: - Dataset ID of the model to patch. + Required. Dataset ID of the model to patch. model_id: - Model ID of the model to patch. + Required. Model ID of the model to patch. model: - Patched model. Follows RFC5789 patch semantics. Missing fields - are not updated. To clear a field, explicitly set to default - value. + Required. Patched model. Follows RFC5789 patch semantics. + Missing fields are not updated. To clear a field, explicitly + set to default value. """, # @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.PatchModelRequest) ), @@ -3907,11 +3940,11 @@ Attributes: project_id: - Project ID of the model to delete. + Required. Project ID of the model to delete. dataset_id: - Dataset ID of the model to delete. + Required. Dataset ID of the model to delete. model_id: - Model ID of the model to delete. + Required. Model ID of the model to delete. """, # @@protoc_insertion_point(class_scope:google.cloud.bigquery.v2.DeleteModelRequest) ), @@ -3928,9 +3961,9 @@ Attributes: project_id: - Project ID of the models to list. + Required. Project ID of the models to list. dataset_id: - Dataset ID of the models to list. + Required. Dataset ID of the models to list. max_results: The maximum number of results to return in a single response page. 
Leverage the page tokens to iterate through the entire @@ -3969,6 +4002,30 @@ DESCRIPTOR._options = None _MODEL_TRAININGRUN_TRAININGOPTIONS_LABELCLASSWEIGHTSENTRY._options = None _MODEL_LABELSENTRY._options = None +_MODEL.fields_by_name["etag"]._options = None +_MODEL.fields_by_name["model_reference"]._options = None +_MODEL.fields_by_name["creation_time"]._options = None +_MODEL.fields_by_name["last_modified_time"]._options = None +_MODEL.fields_by_name["description"]._options = None +_MODEL.fields_by_name["friendly_name"]._options = None +_MODEL.fields_by_name["expiration_time"]._options = None +_MODEL.fields_by_name["location"]._options = None +_MODEL.fields_by_name["model_type"]._options = None +_MODEL.fields_by_name["training_runs"]._options = None +_MODEL.fields_by_name["feature_columns"]._options = None +_MODEL.fields_by_name["label_columns"]._options = None +_GETMODELREQUEST.fields_by_name["project_id"]._options = None +_GETMODELREQUEST.fields_by_name["dataset_id"]._options = None +_GETMODELREQUEST.fields_by_name["model_id"]._options = None +_PATCHMODELREQUEST.fields_by_name["project_id"]._options = None +_PATCHMODELREQUEST.fields_by_name["dataset_id"]._options = None +_PATCHMODELREQUEST.fields_by_name["model_id"]._options = None +_PATCHMODELREQUEST.fields_by_name["model"]._options = None +_DELETEMODELREQUEST.fields_by_name["project_id"]._options = None +_DELETEMODELREQUEST.fields_by_name["dataset_id"]._options = None +_DELETEMODELREQUEST.fields_by_name["model_id"]._options = None +_LISTMODELSREQUEST.fields_by_name["project_id"]._options = None +_LISTMODELSREQUEST.fields_by_name["dataset_id"]._options = None _MODELSERVICE = _descriptor.ServiceDescriptor( name="ModelService", @@ -3978,8 +4035,8 @@ serialized_options=_b( "\312A\027bigquery.googleapis.com\322A\302\001https://www.googleapis.com/auth/bigquery,https://www.googleapis.com/auth/bigquery.readonly,https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-only" 
), - serialized_start=7509, - serialized_end=8128, + serialized_start=7804, + serialized_end=8566, methods=[ _descriptor.MethodDescriptor( name="GetModel", @@ -3988,7 +4045,7 @@ containing_service=None, input_type=_GETMODELREQUEST, output_type=_MODEL, - serialized_options=None, + serialized_options=_b("\332A\036project_id,dataset_id,model_id"), ), _descriptor.MethodDescriptor( name="ListModels", @@ -3997,7 +4054,7 @@ containing_service=None, input_type=_LISTMODELSREQUEST, output_type=_LISTMODELSRESPONSE, - serialized_options=None, + serialized_options=_b("\332A!project_id,dataset_id,max_results"), ), _descriptor.MethodDescriptor( name="PatchModel", @@ -4006,7 +4063,7 @@ containing_service=None, input_type=_PATCHMODELREQUEST, output_type=_MODEL, - serialized_options=None, + serialized_options=_b("\332A$project_id,dataset_id,model_id,model"), ), _descriptor.MethodDescriptor( name="DeleteModel", @@ -4015,7 +4072,7 @@ containing_service=None, input_type=_DELETEMODELREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=None, + serialized_options=_b("\332A\036project_id,dataset_id,model_id"), ), ], ) diff --git a/bigquery/google/cloud/bigquery_v2/proto/model_reference.proto b/bigquery/google/cloud/bigquery_v2/proto/model_reference.proto index f436659abd4f..fadd175146b0 100644 --- a/bigquery/google/cloud/bigquery_v2/proto/model_reference.proto +++ b/bigquery/google/cloud/bigquery_v2/proto/model_reference.proto @@ -17,6 +17,7 @@ syntax = "proto3"; package google.cloud.bigquery.v2; +import "google/api/field_behavior.proto"; import "google/api/annotations.proto"; option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/v2;bigquery"; @@ -25,14 +26,14 @@ option java_package = "com.google.cloud.bigquery.v2"; // Id path of a model. message ModelReference { - // [Required] The ID of the project containing this model. - string project_id = 1; + // Required. The ID of the project containing this model. 
+ string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - // [Required] The ID of the dataset containing this model. - string dataset_id = 2; + // Required. The ID of the dataset containing this model. + string dataset_id = 2 [(google.api.field_behavior) = REQUIRED]; - // [Required] The ID of the model. The ID must contain only + // Required. The ID of the model. The ID must contain only // letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum // length is 1,024 characters. - string model_id = 3; + string model_id = 3 [(google.api.field_behavior) = REQUIRED]; } diff --git a/bigquery/google/cloud/bigquery_v2/proto/model_reference_pb2.py b/bigquery/google/cloud/bigquery_v2/proto/model_reference_pb2.py index 94f6116b5a44..01e6e29522a5 100644 --- a/bigquery/google/cloud/bigquery_v2/proto/model_reference_pb2.py +++ b/bigquery/google/cloud/bigquery_v2/proto/model_reference_pb2.py @@ -15,6 +15,7 @@ _sym_db = _symbol_database.Default() +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 @@ -26,9 +27,12 @@ "\n\034com.google.cloud.bigquery.v2B\023ModelReferenceProtoZ@google.golang.org/genproto/googleapis/cloud/bigquery/v2;bigquery" ), serialized_pb=_b( - '\n4google/cloud/bigquery_v2/proto/model_reference.proto\x12\x18google.cloud.bigquery.v2\x1a\x1cgoogle/api/annotations.proto"J\n\x0eModelReference\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x12\n\ndataset_id\x18\x02 \x01(\t\x12\x10\n\x08model_id\x18\x03 \x01(\tBu\n\x1c\x63om.google.cloud.bigquery.v2B\x13ModelReferenceProtoZ@google.golang.org/genproto/googleapis/cloud/bigquery/v2;bigqueryb\x06proto3' + '\n4google/cloud/bigquery_v2/proto/model_reference.proto\x12\x18google.cloud.bigquery.v2\x1a\x1fgoogle/api/field_behavior.proto\x1a\x1cgoogle/api/annotations.proto"Y\n\x0eModelReference\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x17\n\ndataset_id\x18\x02 
\x01(\tB\x03\xe0\x41\x02\x12\x15\n\x08model_id\x18\x03 \x01(\tB\x03\xe0\x41\x02\x42u\n\x1c\x63om.google.cloud.bigquery.v2B\x13ModelReferenceProtoZ@google.golang.org/genproto/googleapis/cloud/bigquery/v2;bigqueryb\x06proto3' ), - dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR], + dependencies=[ + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, + google_dot_api_dot_annotations__pb2.DESCRIPTOR, + ], ) @@ -54,7 +58,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -72,7 +76,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -90,7 +94,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -102,8 +106,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=112, - serialized_end=186, + serialized_start=145, + serialized_end=234, ) DESCRIPTOR.message_types_by_name["ModelReference"] = _MODELREFERENCE @@ -120,11 +124,11 @@ Attributes: project_id: - [Required] The ID of the project containing this model. + Required. The ID of the project containing this model. dataset_id: - [Required] The ID of the dataset containing this model. + Required. The ID of the dataset containing this model. model_id: - [Required] The ID of the model. The ID must contain only + Required. The ID of the model. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (\_). The maximum length is 1,024 characters. 
""", @@ -135,4 +139,7 @@ DESCRIPTOR._options = None +_MODELREFERENCE.fields_by_name["project_id"]._options = None +_MODELREFERENCE.fields_by_name["dataset_id"]._options = None +_MODELREFERENCE.fields_by_name["model_id"]._options = None # @@protoc_insertion_point(module_scope) diff --git a/bigquery/google/cloud/bigquery_v2/proto/standard_sql.proto b/bigquery/google/cloud/bigquery_v2/proto/standard_sql.proto index 98173092ff71..ff69dfc4eb30 100644 --- a/bigquery/google/cloud/bigquery_v2/proto/standard_sql.proto +++ b/bigquery/google/cloud/bigquery_v2/proto/standard_sql.proto @@ -17,6 +17,7 @@ syntax = "proto3"; package google.cloud.bigquery.v2; +import "google/api/field_behavior.proto"; import "google/api/annotations.proto"; option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/v2;bigquery"; @@ -82,7 +83,7 @@ message StandardSqlDataType { // Required. The top level type of this field. // Can be any standard SQL data type (e.g., "INT64", "DATE", "ARRAY"). - TypeKind type_kind = 1; + TypeKind type_kind = 1 [(google.api.field_behavior) = REQUIRED]; oneof sub_type { // The type of the array's elements, if type_kind = "ARRAY". @@ -96,12 +97,12 @@ message StandardSqlDataType { // A field or a column. message StandardSqlField { // Optional. The name of this field. Can be absent for struct fields. - string name = 1; + string name = 1 [(google.api.field_behavior) = OPTIONAL]; // Optional. The type of this parameter. Absent if not explicitly // specified (e.g., CREATE FUNCTION statement can omit the return type; // in this case the output parameter does not have this "type" field). 
- StandardSqlDataType type = 2; + StandardSqlDataType type = 2 [(google.api.field_behavior) = OPTIONAL]; } message StandardSqlStructType { diff --git a/bigquery/google/cloud/bigquery_v2/proto/standard_sql_pb2.py b/bigquery/google/cloud/bigquery_v2/proto/standard_sql_pb2.py index 91f1554f7f80..3b394b8bf10e 100644 --- a/bigquery/google/cloud/bigquery_v2/proto/standard_sql_pb2.py +++ b/bigquery/google/cloud/bigquery_v2/proto/standard_sql_pb2.py @@ -15,6 +15,7 @@ _sym_db = _symbol_database.Default() +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 @@ -26,9 +27,12 @@ "\n\034com.google.cloud.bigquery.v2B\020StandardSqlProtoZ@google.golang.org/genproto/googleapis/cloud/bigquery/v2;bigquery" ), serialized_pb=_b( - '\n1google/cloud/bigquery_v2/proto/standard_sql.proto\x12\x18google.cloud.bigquery.v2\x1a\x1cgoogle/api/annotations.proto"\xc6\x03\n\x13StandardSqlDataType\x12I\n\ttype_kind\x18\x01 \x01(\x0e\x32\x36.google.cloud.bigquery.v2.StandardSqlDataType.TypeKind\x12K\n\x12\x61rray_element_type\x18\x02 \x01(\x0b\x32-.google.cloud.bigquery.v2.StandardSqlDataTypeH\x00\x12\x46\n\x0bstruct_type\x18\x03 \x01(\x0b\x32/.google.cloud.bigquery.v2.StandardSqlStructTypeH\x00"\xc2\x01\n\x08TypeKind\x12\x19\n\x15TYPE_KIND_UNSPECIFIED\x10\x00\x12\t\n\x05INT64\x10\x02\x12\x08\n\x04\x42OOL\x10\x05\x12\x0b\n\x07\x46LOAT64\x10\x07\x12\n\n\x06STRING\x10\x08\x12\t\n\x05\x42YTES\x10\t\x12\r\n\tTIMESTAMP\x10\x13\x12\x08\n\x04\x44\x41TE\x10\n\x12\x08\n\x04TIME\x10\x14\x12\x0c\n\x08\x44\x41TETIME\x10\x15\x12\r\n\tGEOGRAPHY\x10\x16\x12\x0b\n\x07NUMERIC\x10\x17\x12\t\n\x05\x41RRAY\x10\x10\x12\n\n\x06STRUCT\x10\x11\x42\n\n\x08sub_type"]\n\x10StandardSqlField\x12\x0c\n\x04name\x18\x01 \x01(\t\x12;\n\x04type\x18\x02 \x01(\x0b\x32-.google.cloud.bigquery.v2.StandardSqlDataType"S\n\x15StandardSqlStructType\x12:\n\x06\x66ields\x18\x01 
\x03(\x0b\x32*.google.cloud.bigquery.v2.StandardSqlFieldBr\n\x1c\x63om.google.cloud.bigquery.v2B\x10StandardSqlProtoZ@google.golang.org/genproto/googleapis/cloud/bigquery/v2;bigqueryb\x06proto3' + '\n1google/cloud/bigquery_v2/proto/standard_sql.proto\x12\x18google.cloud.bigquery.v2\x1a\x1fgoogle/api/field_behavior.proto\x1a\x1cgoogle/api/annotations.proto"\xcb\x03\n\x13StandardSqlDataType\x12N\n\ttype_kind\x18\x01 \x01(\x0e\x32\x36.google.cloud.bigquery.v2.StandardSqlDataType.TypeKindB\x03\xe0\x41\x02\x12K\n\x12\x61rray_element_type\x18\x02 \x01(\x0b\x32-.google.cloud.bigquery.v2.StandardSqlDataTypeH\x00\x12\x46\n\x0bstruct_type\x18\x03 \x01(\x0b\x32/.google.cloud.bigquery.v2.StandardSqlStructTypeH\x00"\xc2\x01\n\x08TypeKind\x12\x19\n\x15TYPE_KIND_UNSPECIFIED\x10\x00\x12\t\n\x05INT64\x10\x02\x12\x08\n\x04\x42OOL\x10\x05\x12\x0b\n\x07\x46LOAT64\x10\x07\x12\n\n\x06STRING\x10\x08\x12\t\n\x05\x42YTES\x10\t\x12\r\n\tTIMESTAMP\x10\x13\x12\x08\n\x04\x44\x41TE\x10\n\x12\x08\n\x04TIME\x10\x14\x12\x0c\n\x08\x44\x41TETIME\x10\x15\x12\r\n\tGEOGRAPHY\x10\x16\x12\x0b\n\x07NUMERIC\x10\x17\x12\t\n\x05\x41RRAY\x10\x10\x12\n\n\x06STRUCT\x10\x11\x42\n\n\x08sub_type"g\n\x10StandardSqlField\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x01\x12@\n\x04type\x18\x02 \x01(\x0b\x32-.google.cloud.bigquery.v2.StandardSqlDataTypeB\x03\xe0\x41\x01"S\n\x15StandardSqlStructType\x12:\n\x06\x66ields\x18\x01 \x03(\x0b\x32*.google.cloud.bigquery.v2.StandardSqlFieldBr\n\x1c\x63om.google.cloud.bigquery.v2B\x10StandardSqlProtoZ@google.golang.org/genproto/googleapis/cloud/bigquery/v2;bigqueryb\x06proto3' ), - dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR], + dependencies=[ + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, + google_dot_api_dot_annotations__pb2.DESCRIPTOR, + ], ) @@ -87,8 +91,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=358, - serialized_end=552, + serialized_start=396, + serialized_end=590, ) 
_sym_db.RegisterEnumDescriptor(_STANDARDSQLDATATYPE_TYPEKIND) @@ -115,7 +119,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -157,7 +161,7 @@ ], extensions=[], nested_types=[], - enum_types=[_STANDARDSQLDATATYPE_TYPEKIND], + enum_types=[_STANDARDSQLDATATYPE_TYPEKIND,], serialized_options=None, is_extendable=False, syntax="proto3", @@ -169,10 +173,10 @@ index=0, containing_type=None, fields=[], - ) + ), ], - serialized_start=110, - serialized_end=564, + serialized_start=143, + serialized_end=602, ) @@ -198,7 +202,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -216,7 +220,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -228,8 +232,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=566, - serialized_end=659, + serialized_start=604, + serialized_end=707, ) @@ -257,7 +261,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -267,8 +271,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=661, - serialized_end=744, + serialized_start=709, + serialized_end=792, ) _STANDARDSQLDATATYPE.fields_by_name[ @@ -363,4 +367,7 @@ DESCRIPTOR._options = None +_STANDARDSQLDATATYPE.fields_by_name["type_kind"]._options = None +_STANDARDSQLFIELD.fields_by_name["name"]._options = None +_STANDARDSQLFIELD.fields_by_name["type"]._options = None # @@protoc_insertion_point(module_scope) diff --git a/bigquery/google/cloud/bigquery_v2/types.py b/bigquery/google/cloud/bigquery_v2/types.py index 36fc2d6bfbd2..ee852364a10f 100644 --- a/bigquery/google/cloud/bigquery_v2/types.py +++ 
b/bigquery/google/cloud/bigquery_v2/types.py @@ -20,6 +20,7 @@ from google.api_core.protobuf_helpers import get_messages +from google.cloud.bigquery_v2.proto import encryption_config_pb2 from google.cloud.bigquery_v2.proto import model_pb2 from google.cloud.bigquery_v2.proto import model_reference_pb2 from google.cloud.bigquery_v2.proto import standard_sql_pb2 @@ -28,9 +29,18 @@ from google.protobuf import wrappers_pb2 -_shared_modules = [empty_pb2, timestamp_pb2, wrappers_pb2] +_shared_modules = [ + empty_pb2, + timestamp_pb2, + wrappers_pb2, +] -_local_modules = [model_pb2, model_reference_pb2, standard_sql_pb2] +_local_modules = [ + encryption_config_pb2, + model_pb2, + model_reference_pb2, + standard_sql_pb2, +] names = [] diff --git a/bigquery/noxfile.py b/bigquery/noxfile.py index 37611a5ce296..a6d8094ebbc3 100644 --- a/bigquery/noxfile.py +++ b/bigquery/noxfile.py @@ -20,11 +20,7 @@ import nox -LOCAL_DEPS = ( - os.path.join("..", "api_core[grpc]"), - os.path.join("..", "core"), - os.path.join("..", "test_utils"), -) +LOCAL_DEPS = (os.path.join("..", "api_core[grpc]"), os.path.join("..", "core")) BLACK_PATHS = ("docs", "google", "samples", "tests", "noxfile.py", "setup.py") @@ -42,6 +38,7 @@ def default(session): for local_dep in LOCAL_DEPS: session.install("-e", local_dep) + session.install("-e", os.path.join("..", "test_utils")) dev_install = ".[all]" session.install("-e", dev_install) @@ -150,6 +147,7 @@ def lint(session): session.install("-e", ".") session.run("flake8", os.path.join("google", "cloud", "bigquery")) session.run("flake8", "tests") + session.run("flake8", os.path.join("docs", "samples")) session.run("flake8", os.path.join("docs", "snippets.py")) session.run("black", "--check", *BLACK_PATHS) diff --git a/bigquery/samples/add_empty_column.py b/bigquery/samples/add_empty_column.py index eb84037598d3..bd531898eb29 100644 --- a/bigquery/samples/add_empty_column.py +++ b/bigquery/samples/add_empty_column.py @@ -21,17 +21,18 @@ def 
add_empty_column(client, table_id): # TODO(developer): Construct a BigQuery client object. # client = bigquery.Client() - # TODO(developer): Set table_id to the ID of the table to add an empty column. + # TODO(developer): Set table_id to the ID of the table + # to add an empty column. # table_id = "your-project.your_dataset.your_table_name" - table = client.get_table(table_id) + table = client.get_table(table_id) # Make an API request. original_schema = table.schema - new_schema = original_schema[:] # creates a copy of the schema + new_schema = original_schema[:] # Creates a copy of the schema. new_schema.append(bigquery.SchemaField("phone", "STRING")) table.schema = new_schema - table = client.update_table(table, ["schema"]) # API request + table = client.update_table(table, ["schema"]) # Make an API request. if len(table.schema) == len(original_schema) + 1 == len(new_schema): print("A new column has been added.") diff --git a/bigquery/samples/browse_table_data.py b/bigquery/samples/browse_table_data.py index dd6c572cab6d..78d1d351a7a7 100644 --- a/bigquery/samples/browse_table_data.py +++ b/bigquery/samples/browse_table_data.py @@ -26,7 +26,7 @@ def browse_table_data(client, table_id): # table_id = "your-project.your_dataset.your_table_name" # Download all rows from a table. - rows_iter = client.list_rows(table_id) + rows_iter = client.list_rows(table_id) # Make an API request. # Iterate over rows to make the API requests to fetch row data. rows = list(rows_iter) @@ -38,10 +38,18 @@ def browse_table_data(client, table_id): print("Downloaded {} rows from table {}".format(len(rows), table_id)) # Specify selected fields to limit the results to certain columns. - table = client.get_table(table_id) - fields = table.schema[:2] # first two columns + table = client.get_table(table_id) # Make an API request. + fields = table.schema[:2] # First two columns. 
rows_iter = client.list_rows(table_id, selected_fields=fields, max_results=10) rows = list(rows_iter) print("Selected {} columns from table {}.".format(len(rows_iter.schema), table_id)) print("Downloaded {} rows from table {}".format(len(rows), table_id)) + + # Print row data in tabular format. + rows = client.list_rows(table, max_results=10) + format_string = "{!s:<16} " * len(rows.schema) + field_names = [field.name for field in rows.schema] + print(format_string.format(*field_names)) # Prints column headers. + for row in rows: + print(format_string.format(*row)) # Prints row data. # [END bigquery_browse_table] diff --git a/bigquery/samples/client_list_jobs.py b/bigquery/samples/client_list_jobs.py new file mode 100644 index 000000000000..08eb4fbd99ef --- /dev/null +++ b/bigquery/samples/client_list_jobs.py @@ -0,0 +1,50 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def client_list_jobs(client): + + # [START bigquery_list_jobs] + # TODO(developer): Import the client library. + # from google.cloud import bigquery + + import datetime + + # TODO(developer): Construct a BigQuery client object. + # client = bigquery.Client() + + # List the 10 most recent jobs in reverse chronological order. + # Omit the max_results parameter to list jobs from the past 6 months. 
+ print("Last 10 jobs:") + for job in client.list_jobs(max_results=10): # API request(s) + print("{}".format(job.job_id)) + + # The following are examples of additional optional parameters: + + # Use min_creation_time and/or max_creation_time to specify a time window. + print("Jobs from the last ten minutes:") + ten_mins_ago = datetime.datetime.utcnow() - datetime.timedelta(minutes=10) + for job in client.list_jobs(min_creation_time=ten_mins_ago): + print("{}".format(job.job_id)) + + # Use all_users to include jobs run by all users in the project. + print("Last 10 jobs run by all users:") + for job in client.list_jobs(max_results=10, all_users=True): + print("{} run by user: {}".format(job.job_id, job.user_email)) + + # Use state_filter to filter by job state. + print("Last 10 jobs done:") + for job in client.list_jobs(max_results=10, state_filter="DONE"): + print("{}".format(job.job_id)) + # [END bigquery_list_jobs] diff --git a/bigquery/samples/client_query.py b/bigquery/samples/client_query.py new file mode 100644 index 000000000000..9dccfd38cbcf --- /dev/null +++ b/bigquery/samples/client_query.py @@ -0,0 +1,41 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def client_query(client): + + # [START bigquery_query] + # TODO(developer): Import the client library. + # from google.cloud import bigquery + + # TODO(developer): Construct a BigQuery client object. 
+ # client = bigquery.Client() + + query = """ + SELECT name, SUM(number) as total_people + FROM `bigquery-public-data.usa_names.usa_1910_2013` + WHERE state = 'TX' + GROUP BY name, state + ORDER BY total_people DESC + LIMIT 20 + """ + query_job = client.query( + query, location="US" # Must match the destination dataset(s) location. + ) # Make an API request. + + print("The query data:") + for row in query_job: + # Row values can be accessed by field name or index. + print("name={}, count={}".format(row[0], row["total_people"])) + # [END bigquery_query] diff --git a/bigquery/samples/copy_table.py b/bigquery/samples/copy_table.py new file mode 100644 index 000000000000..f6ebd91470eb --- /dev/null +++ b/bigquery/samples/copy_table.py @@ -0,0 +1,39 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def copy_table(client, source_table_id, destination_table_id): + + # [START bigquery_copy_table] + # TODO(developer): Import the client library. + # from google.cloud import bigquery + + # TODO(developer): Construct a BigQuery client object. + # client = bigquery.Client() + + # TODO(developer): Set source_table_id to the ID of the original table. + # source_table_id = "your-project.source_dataset.source_table" + + # TODO(developer): Set destination_table_id to the ID of the destination table. 
+ # destination_table_id = "your-project.destination_dataset.destination_table" + + job = client.copy_table( + source_table_id, + destination_table_id, + location="US", # Must match the source and destination tables location. + ) + job.result() # Waits for job to complete. + + print("A copy of the table created.") + # [END bigquery_copy_table] diff --git a/bigquery/samples/create_dataset.py b/bigquery/samples/create_dataset.py index 89ca9d38f5f3..3d64473a2321 100644 --- a/bigquery/samples/create_dataset.py +++ b/bigquery/samples/create_dataset.py @@ -33,6 +33,6 @@ def create_dataset(client, dataset_id): # Send the dataset to the API for creation. # Raises google.api_core.exceptions.Conflict if the Dataset already # exists within the project. - dataset = client.create_dataset(dataset) # API request + dataset = client.create_dataset(dataset) # Make an API request. print("Created dataset {}.{}".format(client.project, dataset.dataset_id)) # [END bigquery_create_dataset] diff --git a/bigquery/samples/create_job.py b/bigquery/samples/create_job.py index 24bb85510598..4f7f27a8e668 100644 --- a/bigquery/samples/create_job.py +++ b/bigquery/samples/create_job.py @@ -33,7 +33,7 @@ def create_job(client): # The client libraries automatically generate a job ID. Override the # generated ID with either the job_id_prefix or job_id parameters. job_id_prefix="code_sample_", - ) # API request + ) # Make an API request. print("Started job: {}".format(query_job.job_id)) # [END bigquery_create_job] diff --git a/bigquery/samples/create_routine.py b/bigquery/samples/create_routine.py index c08ec4799a3e..424ee4ef5553 100644 --- a/bigquery/samples/create_routine.py +++ b/bigquery/samples/create_routine.py @@ -40,7 +40,7 @@ def create_routine(client, routine_id): ], ) - routine = client.create_routine(routine) + routine = client.create_routine(routine) # Make an API request. 
print("Created routine {}".format(routine.reference)) # [END bigquery_create_routine] diff --git a/bigquery/samples/create_routine_ddl.py b/bigquery/samples/create_routine_ddl.py index a4ae3318e7b4..eb5af0388503 100644 --- a/bigquery/samples/create_routine_ddl.py +++ b/bigquery/samples/create_routine_ddl.py @@ -34,12 +34,8 @@ def create_routine_ddl(client, routine_id): """.format( routine_id ) - - # Initiate the query to create the routine. - query_job = client.query(sql) - - # Wait for the query to complete. - query_job.result() + query_job = client.query(sql) # Make an API request. + query_job.result() # Wait for the job to complete. print("Created routine {}".format(query_job.ddl_target_routine)) # [END bigquery_create_routine_ddl] diff --git a/bigquery/samples/create_table.py b/bigquery/samples/create_table.py index 2a6e98fc72f6..ae26c57fed00 100644 --- a/bigquery/samples/create_table.py +++ b/bigquery/samples/create_table.py @@ -21,7 +21,7 @@ def create_table(client, table_id): # TODO(developer): Construct a BigQuery client object. # client = bigquery.Client() - # TODO(developer): Set table_id to the ID of the table to create + # TODO(developer): Set table_id to the ID of the table to create. # table_id = "your-project.your_dataset.your_table_name" schema = [ @@ -30,7 +30,7 @@ def create_table(client, table_id): ] table = bigquery.Table(table_id, schema=schema) - table = client.create_table(table) # API request + table = client.create_table(table) # Make an API request. 
print( "Created table {}.{}.{}".format(table.project, table.dataset_id, table.table_id) ) diff --git a/bigquery/samples/create_table_range_partitioned.py b/bigquery/samples/create_table_range_partitioned.py new file mode 100644 index 000000000000..f9da09cff847 --- /dev/null +++ b/bigquery/samples/create_table_range_partitioned.py @@ -0,0 +1,45 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def create_table_range_partitioned(client, table_id): + + # [START bigquery_create_table_range_partitioned] + from google.cloud import bigquery + + # TODO(developer): Construct a BigQuery client object. + # client = bigquery.Client() + + # TODO(developer): Set table_id to the ID of the table to create. + # table_id = "your-project.your_dataset.your_table_name" + + schema = [ + bigquery.SchemaField("full_name", "STRING"), + bigquery.SchemaField("city", "STRING"), + bigquery.SchemaField("zipcode", "INTEGER"), + ] + + table = bigquery.Table(table_id, schema=schema) + table.range_partitioning = bigquery.RangePartitioning( + # To use integer range partitioning, select a top-level REQUIRED / + # NULLABLE column with INTEGER / INT64 data type. + field="zipcode", + range_=bigquery.PartitionRange(start=0, end=100000, interval=10), + ) + table = client.create_table(table) # Make an API request. 
+ print( + "Created table {}.{}.{}".format(table.project, table.dataset_id, table.table_id) + ) + # [END bigquery_create_table_range_partitioned] + return table diff --git a/bigquery/samples/dataset_exists.py b/bigquery/samples/dataset_exists.py index 46cf26a623bf..b8b53b8a4580 100644 --- a/bigquery/samples/dataset_exists.py +++ b/bigquery/samples/dataset_exists.py @@ -22,7 +22,7 @@ def dataset_exists(client, dataset_id): # dataset_id = "your-project.your_dataset" try: - client.get_dataset(dataset_id) + client.get_dataset(dataset_id) # Make an API request. print("Dataset {} already exists".format(dataset_id)) except NotFound: print("Dataset {} is not found".format(dataset_id)) diff --git a/bigquery/samples/delete_dataset.py b/bigquery/samples/delete_dataset.py index 6cde1b6b2d27..8ce95d953392 100644 --- a/bigquery/samples/delete_dataset.py +++ b/bigquery/samples/delete_dataset.py @@ -25,9 +25,11 @@ def delete_dataset(client, dataset_id): # TODO(developer): Set model_id to the ID of the model to fetch. # dataset_id = 'your-project.your_dataset' - # Use the delete_contents parameter to delete a dataset and its contents + # Use the delete_contents parameter to delete a dataset and its contents. # Use the not_found_ok parameter to not receive an error if the dataset has already been deleted. - client.delete_dataset(dataset_id, delete_contents=True, not_found_ok=True) + client.delete_dataset( + dataset_id, delete_contents=True, not_found_ok=True + ) # Make an API request. print("Deleted dataset '{}'.".format(dataset_id)) # [END bigquery_delete_dataset] diff --git a/bigquery/samples/delete_dataset_labels.py b/bigquery/samples/delete_dataset_labels.py index 33ff5c0f2620..9e6493694ddc 100644 --- a/bigquery/samples/delete_dataset_labels.py +++ b/bigquery/samples/delete_dataset_labels.py @@ -25,12 +25,12 @@ def delete_dataset_labels(client, dataset_id): # TODO(developer): Set dataset_id to the ID of the dataset to fetch. 
# dataset_id = "your-project.your_dataset" - dataset = client.get_dataset(dataset_id) + dataset = client.get_dataset(dataset_id) # Make an API request. - # To delete a label from a dataset, set its value to None + # To delete a label from a dataset, set its value to None. dataset.labels["color"] = None - dataset = client.update_dataset(dataset, ["labels"]) + dataset = client.update_dataset(dataset, ["labels"]) # Make an API request. print("Labels deleted from {}".format(dataset_id)) # [END bigquery_delete_label_dataset] return dataset diff --git a/bigquery/samples/delete_model.py b/bigquery/samples/delete_model.py index 5ac4305bc97e..b6f32a59ebd9 100644 --- a/bigquery/samples/delete_model.py +++ b/bigquery/samples/delete_model.py @@ -26,7 +26,7 @@ def delete_model(client, model_id): # TODO(developer): Set model_id to the ID of the model to fetch. # model_id = 'your-project.your_dataset.your_model' - client.delete_model(model_id) + client.delete_model(model_id) # Make an API request. print("Deleted model '{}'.".format(model_id)) # [END bigquery_delete_model] diff --git a/bigquery/samples/delete_routine.py b/bigquery/samples/delete_routine.py index c0164b415008..c20b49837b75 100644 --- a/bigquery/samples/delete_routine.py +++ b/bigquery/samples/delete_routine.py @@ -25,7 +25,7 @@ def delete_routine(client, routine_id): # TODO(developer): Set the fully-qualified ID for the routine. # routine_id = "my-project.my_dataset.my_routine" - client.delete_routine(routine_id) + client.delete_routine(routine_id) # Make an API request. 
print("Deleted routine {}.".format(routine_id)) # [END bigquery_delete_routine] diff --git a/bigquery/samples/delete_table.py b/bigquery/samples/delete_table.py index dcdd3d855b2e..b83a92890b09 100644 --- a/bigquery/samples/delete_table.py +++ b/bigquery/samples/delete_table.py @@ -26,7 +26,7 @@ def delete_table(client, table_id): # table_id = 'your-project.your_dataset.your_table' # If the table does not exist, delete_table raises - # google.api_core.exceptions.NotFound unless not_found_ok is True - client.delete_table(table_id, not_found_ok=True) + # google.api_core.exceptions.NotFound unless not_found_ok is True. + client.delete_table(table_id, not_found_ok=True) # Make an API request. print("Deleted table '{}'.".format(table_id)) # [END bigquery_delete_table] diff --git a/bigquery/samples/get_dataset.py b/bigquery/samples/get_dataset.py index 5586c2b95ebb..bb3d4a0d4c40 100644 --- a/bigquery/samples/get_dataset.py +++ b/bigquery/samples/get_dataset.py @@ -25,7 +25,7 @@ def get_dataset(client, dataset_id): # TODO(developer): Set dataset_id to the ID of the dataset to fetch. # dataset_id = 'your-project.your_dataset' - dataset = client.get_dataset(dataset_id) + dataset = client.get_dataset(dataset_id) # Make an API request. full_dataset_id = "{}.{}".format(dataset.project, dataset.dataset_id) friendly_name = dataset.friendly_name @@ -35,7 +35,7 @@ def get_dataset(client, dataset_id): ) ) - # View dataset properties + # View dataset properties. print("Description: {}".format(dataset.description)) print("Labels:") labels = dataset.labels @@ -45,9 +45,9 @@ def get_dataset(client, dataset_id): else: print("\tDataset has no labels defined.") - # View tables in dataset + # View tables in dataset. print("Tables:") - tables = list(client.list_tables(dataset)) # API request(s) + tables = list(client.list_tables(dataset)) # Make an API request(s). 
if tables: for table in tables: print("\t{}".format(table.table_id)) diff --git a/bigquery/samples/get_dataset_labels.py b/bigquery/samples/get_dataset_labels.py index 2f21723a550b..411607f84664 100644 --- a/bigquery/samples/get_dataset_labels.py +++ b/bigquery/samples/get_dataset_labels.py @@ -25,9 +25,9 @@ def get_dataset_labels(client, dataset_id): # TODO(developer): Set dataset_id to the ID of the dataset to fetch. # dataset_id = "your-project.your_dataset" - dataset = client.get_dataset(dataset_id) + dataset = client.get_dataset(dataset_id) # Make an API request. - # View dataset labels + # View dataset labels. print("Dataset ID: {}".format(dataset_id)) print("Labels:") if dataset.labels: diff --git a/bigquery/samples/get_model.py b/bigquery/samples/get_model.py index 69986733c50b..0ebd59c9d067 100644 --- a/bigquery/samples/get_model.py +++ b/bigquery/samples/get_model.py @@ -26,7 +26,7 @@ def get_model(client, model_id): # TODO(developer): Set model_id to the ID of the model to fetch. # model_id = 'your-project.your_dataset.your_model' - model = client.get_model(model_id) + model = client.get_model(model_id) # Make an API request. full_model_id = "{}.{}.{}".format(model.project, model.dataset_id, model.model_id) friendly_name = model.friendly_name diff --git a/bigquery/samples/get_routine.py b/bigquery/samples/get_routine.py index d9035c282438..da4e89f57f19 100644 --- a/bigquery/samples/get_routine.py +++ b/bigquery/samples/get_routine.py @@ -25,15 +25,15 @@ def get_routine(client, routine_id): # TODO(developer): Set the fully-qualified ID for the routine. # routine_id = "my-project.my_dataset.my_routine" - routine = client.get_routine(routine_id) + routine = client.get_routine(routine_id) # Make an API request. 
- print("Routine `{}`:".format(routine.reference)) - print(" Type: '{}'".format(routine.type_)) - print(" Language: '{}'".format(routine.language)) - print(" Arguments:") + print("Routine '{}':".format(routine.reference)) + print("\tType: '{}'".format(routine.type_)) + print("\tLanguage: '{}'".format(routine.language)) + print("\tArguments:") for argument in routine.arguments: - print(" Name: '{}'".format(argument.name)) - print(" Type: '{}'".format(argument.type_)) + print("\t\tName: '{}'".format(argument.name)) + print("\t\tType: '{}'".format(argument.data_type)) # [END bigquery_get_routine] return routine diff --git a/bigquery/samples/get_table.py b/bigquery/samples/get_table.py index e49e032f6e23..201b8808a846 100644 --- a/bigquery/samples/get_table.py +++ b/bigquery/samples/get_table.py @@ -25,13 +25,12 @@ def get_table(client, table_id): # TODO(developer): Set table_id to the ID of the model to fetch. # table_id = 'your-project.your_dataset.your_table' - table = client.get_table(table_id) + table = client.get_table(table_id) # Make an API request. + # View table properties print( "Got table '{}.{}.{}'.".format(table.project, table.dataset_id, table.table_id) ) - - # View table properties print("Table schema: {}".format(table.schema)) print("Table description: {}".format(table.description)) print("Table has {} rows".format(table.num_rows)) diff --git a/bigquery/samples/label_dataset.py b/bigquery/samples/label_dataset.py index 7840ea25a63f..019b2aa374a0 100644 --- a/bigquery/samples/label_dataset.py +++ b/bigquery/samples/label_dataset.py @@ -25,9 +25,9 @@ def label_dataset(client, dataset_id): # TODO(developer): Set dataset_id to the ID of the dataset to fetch. # dataset_id = "your-project.your_dataset" - dataset = client.get_dataset(dataset_id) + dataset = client.get_dataset(dataset_id) # Make an API request. 
dataset.labels = {"color": "green"} - dataset = client.update_dataset(dataset, ["labels"]) + dataset = client.update_dataset(dataset, ["labels"]) # Make an API request. print("Labels added to {}".format(dataset_id)) # [END bigquery_label_dataset] diff --git a/bigquery/samples/list_datasets.py b/bigquery/samples/list_datasets.py index b57aad1b5e7b..77ae8c785d22 100644 --- a/bigquery/samples/list_datasets.py +++ b/bigquery/samples/list_datasets.py @@ -22,12 +22,12 @@ def list_datasets(client): # TODO(developer): Construct a BigQuery client object. # client = bigquery.Client() - datasets = list(client.list_datasets()) + datasets = list(client.list_datasets()) # Make an API request. project = client.project if datasets: print("Datasets in project {}:".format(project)) - for dataset in datasets: # API request(s) + for dataset in datasets: print("\t{}".format(dataset.dataset_id)) else: print("{} project does not contain any datasets.".format(project)) diff --git a/bigquery/samples/list_datasets_by_label.py b/bigquery/samples/list_datasets_by_label.py index 8b574b1110eb..9fa939ad0c19 100644 --- a/bigquery/samples/list_datasets_by_label.py +++ b/bigquery/samples/list_datasets_by_label.py @@ -23,7 +23,7 @@ def list_datasets_by_label(client): # client = bigquery.Client() label_filter = "labels.color:green" - datasets = list(client.list_datasets(filter=label_filter)) + datasets = list(client.list_datasets(filter=label_filter)) # Make an API request. if datasets: print("Datasets filtered by {}:".format(label_filter)) diff --git a/bigquery/samples/list_models.py b/bigquery/samples/list_models.py index 5b4d21799b28..a2477ffc795b 100644 --- a/bigquery/samples/list_models.py +++ b/bigquery/samples/list_models.py @@ -27,7 +27,7 @@ def list_models(client, dataset_id): # the models you are listing. # dataset_id = 'your-project.your_dataset' - models = client.list_models(dataset_id) + models = client.list_models(dataset_id) # Make an API request. 
print("Models contained in '{}':".format(dataset_id)) for model in models: diff --git a/bigquery/samples/list_routines.py b/bigquery/samples/list_routines.py index 1ae4f441cde1..5eaad0cec8f4 100644 --- a/bigquery/samples/list_routines.py +++ b/bigquery/samples/list_routines.py @@ -26,7 +26,7 @@ def list_routines(client, dataset_id): # the routines you are listing. # dataset_id = 'your-project.your_dataset' - routines = client.list_routines(dataset_id) + routines = client.list_routines(dataset_id) # Make an API request. print("Routines contained in dataset {}:".format(dataset_id)) for routine in routines: diff --git a/bigquery/samples/list_tables.py b/bigquery/samples/list_tables.py index 2057f2d73891..d7576616e191 100644 --- a/bigquery/samples/list_tables.py +++ b/bigquery/samples/list_tables.py @@ -26,7 +26,7 @@ def list_tables(client, dataset_id): # the tables you are listing. # dataset_id = 'your-project.your_dataset' - tables = client.list_tables(dataset_id) + tables = client.list_tables(dataset_id) # Make an API request. print("Tables contained in '{}':".format(dataset_id)) for table in tables: diff --git a/bigquery/samples/load_table_dataframe.py b/bigquery/samples/load_table_dataframe.py index 69eeb6ef89d0..ea6fe5d02384 100644 --- a/bigquery/samples/load_table_dataframe.py +++ b/bigquery/samples/load_table_dataframe.py @@ -14,8 +14,10 @@ def load_table_dataframe(client, table_id): + # [START bigquery_load_table_dataframe] from google.cloud import bigquery + import pandas # TODO(developer): Construct a BigQuery client object. @@ -59,11 +61,14 @@ def load_table_dataframe(client, table_id): ) job = client.load_table_from_dataframe( - dataframe, table_id, job_config=job_config, location="US" - ) - job.result() # Waits for table load to complete. + dataframe, + table_id, + job_config=job_config, + location="US", # Must match the destination dataset location. + ) # Make an API request. + job.result() # Wait for the job to complete. 
- table = client.get_table(table_id) + table = client.get_table(table_id) # Make an API request. print( "Loaded {} rows and {} columns to {}".format( table.num_rows, len(table.schema), table_id diff --git a/bigquery/samples/query_external_sheets_permanent_table.py b/bigquery/samples/query_external_sheets_permanent_table.py new file mode 100644 index 000000000000..ce9b1c928782 --- /dev/null +++ b/bigquery/samples/query_external_sheets_permanent_table.py @@ -0,0 +1,73 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def query_external_sheets_permanent_table(dataset_id): + + # [START bigquery_query_external_sheets_perm] + from google.cloud import bigquery + import google.auth + + # Create credentials with Drive & BigQuery API scopes. + # Both APIs must be enabled for your project before running this code. + credentials, project = google.auth.default( + scopes=[ + "https://www.googleapis.com/auth/drive", + "https://www.googleapis.com/auth/bigquery", + ] + ) + + # TODO(developer): Construct a BigQuery client object. + client = bigquery.Client(credentials=credentials, project=project) + + # TODO(developer): Set dataset_id to the ID of the dataset to fetch. + # dataset_id = "your-project.your_dataset" + + # Configure the external data source. 
+ dataset = client.get_dataset(dataset_id) + table_id = "us_states" + schema = [ + bigquery.SchemaField("name", "STRING"), + bigquery.SchemaField("post_abbr", "STRING"), + ] + table = bigquery.Table(dataset.table(table_id), schema=schema) + external_config = bigquery.ExternalConfig("GOOGLE_SHEETS") + # Use a shareable link or grant viewing access to the email address you + # used to authenticate with BigQuery (this example Sheet is public). + sheet_url = ( + "https://docs.google.com/spreadsheets" + "/d/1i_QCL-7HcSyUZmIbP9E6lO_T5u3HnpLe7dnpHaijg_E/edit?usp=sharing" + ) + external_config.source_uris = [sheet_url] + external_config.options.skip_leading_rows = 1 # Optionally skip header row. + external_config.options.range = ( + "us-states!A20:B49" # Optionally set range of the sheet to query from. + ) + table.external_data_configuration = external_config + + # Create a permanent table linked to the Sheets file. + table = client.create_table(table) # Make an API request. + + # Example query to find states starting with "W". + sql = 'SELECT * FROM `{}.{}` WHERE name LIKE "W%"'.format(dataset_id, table_id) + query_job = client.query(sql) # Make an API request. + + # Wait for the query to complete. + w_states = list(query_job) + print( + "There are {} states with names starting with W in the selected range.".format( + len(w_states) + ) + ) + # [END bigquery_query_external_sheets_perm] diff --git a/bigquery/samples/query_external_sheets_temporary_table.py b/bigquery/samples/query_external_sheets_temporary_table.py new file mode 100644 index 000000000000..e89b6efab362 --- /dev/null +++ b/bigquery/samples/query_external_sheets_temporary_table.py @@ -0,0 +1,69 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def query_external_sheets_temporary_table(): + + # [START bigquery_query_external_sheets_temp] + # [START bigquery_auth_drive_scope] + from google.cloud import bigquery + import google.auth + + # Create credentials with Drive & BigQuery API scopes. + # Both APIs must be enabled for your project before running this code. + credentials, project = google.auth.default( + scopes=[ + "https://www.googleapis.com/auth/drive", + "https://www.googleapis.com/auth/bigquery", + ] + ) + + # TODO(developer): Construct a BigQuery client object. + client = bigquery.Client(credentials=credentials, project=project) + # [END bigquery_auth_drive_scope] + + # Configure the external data source and query job. + external_config = bigquery.ExternalConfig("GOOGLE_SHEETS") + + # Use a shareable link or grant viewing access to the email address you + # used to authenticate with BigQuery (this example Sheet is public). + sheet_url = ( + "https://docs.google.com/spreadsheets" + "/d/1i_QCL-7HcSyUZmIbP9E6lO_T5u3HnpLe7dnpHaijg_E/edit?usp=sharing" + ) + external_config.source_uris = [sheet_url] + external_config.schema = [ + bigquery.SchemaField("name", "STRING"), + bigquery.SchemaField("post_abbr", "STRING"), + ] + external_config.options.skip_leading_rows = 1 # Optionally skip header row. + external_config.options.range = ( + "us-states!A20:B49" # Optionally set range of the sheet to query from. + ) + table_id = "us_states" + job_config = bigquery.QueryJobConfig() + job_config.table_definitions = {table_id: external_config} + + # Example query to find states starting with "W". 
+ sql = 'SELECT * FROM `{}` WHERE name LIKE "W%"'.format(table_id) + query_job = client.query(sql, job_config=job_config) # Make an API request. + + # Wait for the query to complete. + w_states = list(query_job) + print( + "There are {} states with names starting with W in the selected range.".format( + len(w_states) + ) + ) + # [END bigquery_query_external_sheets_temp] diff --git a/bigquery/samples/query_script.py b/bigquery/samples/query_script.py new file mode 100644 index 000000000000..453b7c6f9435 --- /dev/null +++ b/bigquery/samples/query_script.py @@ -0,0 +1,69 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def query_script(client): + # [START bigquery_query_script] + # TODO(developer): Import the client library. + # from google.cloud import bigquery + + # TODO(developer): Construct a BigQuery client object. + # client = bigquery.Client() + + # Run a SQL script. + sql_script = """ + -- Declare a variable to hold names as an array. + DECLARE top_names ARRAY; + + -- Build an array of the top 100 names from the year 2017. + SET top_names = ( + SELECT ARRAY_AGG(name ORDER BY number DESC LIMIT 100) + FROM `bigquery-public-data.usa_names.usa_1910_2013` + WHERE year = 2000 + ); + + -- Which names appear as words in Shakespeare's plays? 
+ SELECT + name AS shakespeare_name + FROM UNNEST(top_names) AS name + WHERE name IN ( + SELECT word + FROM `bigquery-public-data.samples.shakespeare` + ); + """ + parent_job = client.query(sql_script) + + # Wait for the whole script to finish. + rows_iterable = parent_job.result() + print("Script created {} child jobs.".format(parent_job.num_child_jobs)) + + # Fetch result rows for the final sub-job in the script. + rows = list(rows_iterable) + print( + "{} of the top 100 names from year 2000 also appear in Shakespeare's works.".format( + len(rows) + ) + ) + + # Fetch jobs created by the SQL script. + child_jobs_iterable = client.list_jobs(parent_job=parent_job) + for child_job in child_jobs_iterable: + child_rows = list(child_job.result()) + print( + "Child job with ID {} produced {} row(s).".format( + child_job.job_id, len(child_rows) + ) + ) + + # [END bigquery_query_script] diff --git a/bigquery/samples/query_to_arrow.py b/bigquery/samples/query_to_arrow.py index b13dcf3e1413..4cc69d4e902a 100644 --- a/bigquery/samples/query_to_arrow.py +++ b/bigquery/samples/query_to_arrow.py @@ -41,7 +41,7 @@ def query_to_arrow(client): CROSS JOIN UNNEST(r.participants) as participant; """ query_job = client.query(sql) - arrow_table = query_job.to_arrow() + arrow_table = query_job.to_arrow() # Make an API request. print( "Downloaded {} rows, {} columns.".format( diff --git a/bigquery/samples/table_exists.py b/bigquery/samples/table_exists.py new file mode 100644 index 000000000000..a011e6e2915d --- /dev/null +++ b/bigquery/samples/table_exists.py @@ -0,0 +1,29 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def table_exists(client, table_id): + + # [START bigquery_table_exists] + from google.cloud.exceptions import NotFound + + # TODO(developer): Set table_id to the ID of the table to determine existence. + # table_id = "your-project.your_dataset.your_table" + + try: + client.get_table(table_id) # Make an API request. + print("Table {} already exists.".format(table_id)) + except NotFound: + print("Table {} is not found.".format(table_id)) + # [END bigquery_table_exists] diff --git a/bigquery/samples/table_insert_rows.py b/bigquery/samples/table_insert_rows.py new file mode 100644 index 000000000000..e2f949b635a6 --- /dev/null +++ b/bigquery/samples/table_insert_rows.py @@ -0,0 +1,34 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def table_insert_rows(client, table_id): + + # [START bigquery_table_insert_rows] + # TODO(developer): Import the client library. + # from google.cloud import bigquery + + # TODO(developer): Construct a BigQuery client object. 
+ # client = bigquery.Client() + + # TODO(developer): Set table_id to the ID of the model to fetch. + # table_id = "your-project.your_dataset.your_table" + + table = client.get_table(table_id) # Make an API request. + rows_to_insert = [(u"Phred Phlyntstone", 32), (u"Wylma Phlyntstone", 29)] + + errors = client.insert_rows(table, rows_to_insert) # Make an API request. + if errors == []: + print("New rows have been added.") + # [END bigquery_table_insert_rows] diff --git a/bigquery/samples/table_insert_rows_explicit_none_insert_ids.py b/bigquery/samples/table_insert_rows_explicit_none_insert_ids.py new file mode 100644 index 000000000000..953e7e210312 --- /dev/null +++ b/bigquery/samples/table_insert_rows_explicit_none_insert_ids.py @@ -0,0 +1,36 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def table_insert_rows_explicit_none_insert_ids(client, table_id): + + # [START bigquery_table_insert_rows_explicit_none_insert_ids] + # TODO(developer): Import the client library. + # from google.cloud import bigquery + + # TODO(developer): Construct a BigQuery client object. + # client = bigquery.Client() + + # TODO(developer): Set table_id to the ID of the model to fetch. + # table_id = "your-project.your_dataset.your_table" + + table = client.get_table(table_id) # Make an API request. 
+ rows_to_insert = [(u"Phred Phlyntstone", 32), (u"Wylma Phlyntstone", 29)] + + errors = client.insert_rows( + table, rows_to_insert, row_ids=[None] * len(rows_to_insert) + ) # Make an API request. + if errors == []: + print("New rows have been added.") + # [END bigquery_table_insert_rows_explicit_none_insert_ids] diff --git a/bigquery/samples/tests/conftest.py b/bigquery/samples/tests/conftest.py index f2bb93112a22..32b23931aa91 100644 --- a/bigquery/samples/tests/conftest.py +++ b/bigquery/samples/tests/conftest.py @@ -57,7 +57,7 @@ def random_routine_id(client, dataset_id): @pytest.fixture def dataset_id(client): now = datetime.datetime.now() - dataset_id = "python_samples_{}_{}".format( + dataset_id = "python_dataset_sample_{}_{}".format( now.strftime("%Y%m%d%H%M%S"), uuid.uuid4().hex[:8] ) dataset = client.create_dataset(dataset_id) @@ -68,7 +68,7 @@ def dataset_id(client): @pytest.fixture def table_id(client, dataset_id): now = datetime.datetime.now() - table_id = "python_samples_{}_{}".format( + table_id = "python_table_sample_{}_{}".format( now.strftime("%Y%m%d%H%M%S"), uuid.uuid4().hex[:8] ) @@ -86,7 +86,7 @@ def table_with_data_id(client): @pytest.fixture def routine_id(client, dataset_id): now = datetime.datetime.now() - routine_id = "python_samples_{}_{}".format( + routine_id = "python_routine_sample_{}_{}".format( now.strftime("%Y%m%d%H%M%S"), uuid.uuid4().hex[:8] ) diff --git a/bigquery/samples/tests/test_browse_table_data.py b/bigquery/samples/tests/test_browse_table_data.py index f777bf91ca00..0e9cc6055494 100644 --- a/bigquery/samples/tests/test_browse_table_data.py +++ b/bigquery/samples/tests/test_browse_table_data.py @@ -24,3 +24,5 @@ def test_browse_table_data(capsys, client, table_with_data_id): assert "Downloaded 10 rows from table {}".format(table_with_data_id) in out assert "Selected 2 columns from table {}".format(table_with_data_id) in out assert "Downloaded 10 rows from table {}".format(table_with_data_id) in out + assert "word" in out + 
assert "LVII" in out diff --git a/bigquery/samples/tests/test_client_list_jobs.py b/bigquery/samples/tests/test_client_list_jobs.py new file mode 100644 index 000000000000..011e081fdee4 --- /dev/null +++ b/bigquery/samples/tests/test_client_list_jobs.py @@ -0,0 +1,31 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from .. import client_list_jobs +from .. import create_job + + +def test_client_list_jobs(capsys, client): + + job = create_job.create_job(client) + client.cancel_job(job.job_id) + job.cancel() + client_list_jobs.client_list_jobs(client) + out, err = capsys.readouterr() + assert "Started job: {}".format(job.job_id) in out + assert "Last 10 jobs:" in out + assert "Jobs from the last ten minutes:" in out + assert "Last 10 jobs run by all users:" in out + assert "Last 10 jobs done:" in out diff --git a/bigquery/samples/tests/test_client_query.py b/bigquery/samples/tests/test_client_query.py new file mode 100644 index 000000000000..fd5b8e7edd97 --- /dev/null +++ b/bigquery/samples/tests/test_client_query.py @@ -0,0 +1,24 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from .. import client_query + + +def test_client_query(capsys, client): + + client_query.client_query(client) + out, err = capsys.readouterr() + assert "The query data:" in out + assert "name=James, count=272793" in out diff --git a/bigquery/samples/tests/test_copy_table.py b/bigquery/samples/tests/test_copy_table.py new file mode 100644 index 000000000000..6d7de2d9132c --- /dev/null +++ b/bigquery/samples/tests/test_copy_table.py @@ -0,0 +1,27 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from .. import copy_table + + +def test_copy_table(capsys, client, table_with_data_id, random_table_id): + + copy_table.copy_table(client, table_with_data_id, random_table_id) + out, err = capsys.readouterr() + assert "A copy of the table created." 
in out + assert ( + client.get_table(random_table_id).num_rows + == client.get_table(table_with_data_id).num_rows + ) diff --git a/bigquery/samples/tests/test_create_dataset.py b/bigquery/samples/tests/test_create_dataset.py index dfadc67d8468..e52e9ddfdced 100644 --- a/bigquery/samples/tests/test_create_dataset.py +++ b/bigquery/samples/tests/test_create_dataset.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. + from .. import create_dataset diff --git a/bigquery/samples/tests/test_create_job.py b/bigquery/samples/tests/test_create_job.py index fce005ae8236..5ead51156606 100644 --- a/bigquery/samples/tests/test_create_job.py +++ b/bigquery/samples/tests/test_create_job.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. + from .. import create_job diff --git a/bigquery/samples/tests/test_create_routine.py b/bigquery/samples/tests/test_create_routine.py new file mode 100644 index 000000000000..7220d63542e2 --- /dev/null +++ b/bigquery/samples/tests/test_create_routine.py @@ -0,0 +1,23 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from .. 
import create_routine + + +def test_create_routine(capsys, client, random_routine_id): + + create_routine.create_routine(client, random_routine_id) + out, err = capsys.readouterr() + assert "Created routine {}".format(random_routine_id) in out diff --git a/bigquery/samples/tests/test_routine_samples.py b/bigquery/samples/tests/test_create_routine_ddl.py similarity index 69% rename from bigquery/samples/tests/test_routine_samples.py rename to bigquery/samples/tests/test_create_routine_ddl.py index 5a1c69c7f60f..bcb3249d26ef 100644 --- a/bigquery/samples/tests/test_routine_samples.py +++ b/bigquery/samples/tests/test_create_routine_ddl.py @@ -12,27 +12,19 @@ # See the License for the specific language governing permissions and # limitations under the License. + from google.cloud import bigquery from google.cloud import bigquery_v2 - -def test_create_routine(capsys, client, random_routine_id): - from .. import create_routine - - create_routine.create_routine(client, random_routine_id) - out, err = capsys.readouterr() - assert "Created routine {}".format(random_routine_id) in out +from .. import create_routine_ddl def test_create_routine_ddl(capsys, client, random_routine_id): - from .. import create_routine_ddl create_routine_ddl.create_routine_ddl(client, random_routine_id) routine = client.get_routine(random_routine_id) out, err = capsys.readouterr() - assert "Created routine {}".format(random_routine_id) in out - return routine assert routine.type_ == "SCALAR_FUNCTION" assert routine.language == "SQL" expected_arguments = [ @@ -63,27 +55,3 @@ def test_create_routine_ddl(capsys, client, random_routine_id): ) ] assert routine.arguments == expected_arguments - - -def test_list_routines(capsys, client, dataset_id, routine_id): - from .. 
import list_routines - - list_routines.list_routines(client, dataset_id) - out, err = capsys.readouterr() - assert "Routines contained in dataset {}:".format(dataset_id) in out - assert routine_id in out - - -def test_delete_routine(capsys, client, routine_id): - from .. import delete_routine - - delete_routine.delete_routine(client, routine_id) - out, err = capsys.readouterr() - assert "Deleted routine {}.".format(routine_id) in out - - -def test_update_routine(client, routine_id): - from .. import update_routine - - routine = update_routine.update_routine(client, routine_id) - assert routine.body == "x * 4" diff --git a/bigquery/samples/tests/test_create_table.py b/bigquery/samples/tests/test_create_table.py index 093ee6e94277..f9ebc0e5d70d 100644 --- a/bigquery/samples/tests/test_create_table.py +++ b/bigquery/samples/tests/test_create_table.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. + from .. import create_table diff --git a/bigquery/samples/tests/test_create_table_range_partitioned.py b/bigquery/samples/tests/test_create_table_range_partitioned.py new file mode 100644 index 000000000000..ca186f9a7554 --- /dev/null +++ b/bigquery/samples/tests/test_create_table_range_partitioned.py @@ -0,0 +1,28 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from .. 
import create_table_range_partitioned + + +def test_create_table_range_partitioned(capsys, client, random_table_id): + table = create_table_range_partitioned.create_table_range_partitioned( + client, random_table_id + ) + out, _ = capsys.readouterr() + assert "Created table {}".format(random_table_id) in out + assert table.range_partitioning.field == "zipcode" + assert table.range_partitioning.range_.start == 0 + assert table.range_partitioning.range_.end == 100000 + assert table.range_partitioning.range_.interval == 10 diff --git a/bigquery/samples/tests/test_dataset_label_samples.py b/bigquery/samples/tests/test_dataset_label_samples.py index 94a2092407b0..1e526f2339ac 100644 --- a/bigquery/samples/tests/test_dataset_label_samples.py +++ b/bigquery/samples/tests/test_dataset_label_samples.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. + from .. import delete_dataset_labels from .. import get_dataset_labels from .. import label_dataset diff --git a/bigquery/samples/tests/test_delete_dataset.py b/bigquery/samples/tests/test_delete_dataset.py index 2b1b6ad06195..836b3aebb272 100644 --- a/bigquery/samples/tests/test_delete_dataset.py +++ b/bigquery/samples/tests/test_delete_dataset.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. + from .. import delete_dataset diff --git a/bigquery/samples/tests/test_delete_routine.py b/bigquery/samples/tests/test_delete_routine.py new file mode 100644 index 000000000000..9347d1e22dc2 --- /dev/null +++ b/bigquery/samples/tests/test_delete_routine.py @@ -0,0 +1,23 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from .. import delete_routine + + +def test_delete_routine(capsys, client, routine_id): + + delete_routine.delete_routine(client, routine_id) + out, err = capsys.readouterr() + assert "Deleted routine {}.".format(routine_id) in out diff --git a/bigquery/samples/tests/test_delete_table.py b/bigquery/samples/tests/test_delete_table.py index 8f4796623a83..f76ad8624cc6 100644 --- a/bigquery/samples/tests/test_delete_table.py +++ b/bigquery/samples/tests/test_delete_table.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. + from .. import delete_table diff --git a/bigquery/samples/tests/test_get_dataset.py b/bigquery/samples/tests/test_get_dataset.py index 374f8835211a..8682be7ee3e9 100644 --- a/bigquery/samples/tests/test_get_dataset.py +++ b/bigquery/samples/tests/test_get_dataset.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. + from .. 
import get_dataset @@ -19,4 +20,4 @@ def test_get_dataset(capsys, client, dataset_id): get_dataset.get_dataset(client, dataset_id) out, err = capsys.readouterr() - assert "{}".format(dataset_id) in out + assert dataset_id in out diff --git a/bigquery/samples/tests/test_get_routine.py b/bigquery/samples/tests/test_get_routine.py new file mode 100644 index 000000000000..fa5f3093116c --- /dev/null +++ b/bigquery/samples/tests/test_get_routine.py @@ -0,0 +1,27 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from .. import get_routine + + +def test_get_routine(capsys, client, routine_id): + + get_routine.get_routine(client, routine_id) + out, err = capsys.readouterr() + assert "Routine '{}':".format(routine_id) in out + assert "Type: 'SCALAR_FUNCTION'" in out + assert "Language: 'SQL'" in out + assert "Name: 'x'" in out + assert "Type: 'type_kind: INT64\n'" in out diff --git a/bigquery/samples/tests/test_get_table.py b/bigquery/samples/tests/test_get_table.py index b811ccecad1f..8adaa6557954 100644 --- a/bigquery/samples/tests/test_get_table.py +++ b/bigquery/samples/tests/test_get_table.py @@ -12,7 +12,9 @@ # See the License for the specific language governing permissions and # limitations under the License. + from google.cloud import bigquery + from .. 
import get_table @@ -30,7 +32,7 @@ def test_get_table(capsys, client, random_table_id): get_table.get_table(client, random_table_id) out, err = capsys.readouterr() assert "Got table '{}'.".format(random_table_id) in out - assert "full_name" in out # test that schema is printed + assert "full_name" in out assert "Table description: Sample Table" in out assert "Table has 0 rows" in out client.delete_table(table, not_found_ok=True) diff --git a/bigquery/samples/tests/test_list_datasets.py b/bigquery/samples/tests/test_list_datasets.py index 4c66a24f9b1a..d8c32e91ee20 100644 --- a/bigquery/samples/tests/test_list_datasets.py +++ b/bigquery/samples/tests/test_list_datasets.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. + from .. import list_datasets diff --git a/bigquery/samples/tests/test_list_datasets_by_label.py b/bigquery/samples/tests/test_list_datasets_by_label.py index 346cbf1a982d..f414539b00b3 100644 --- a/bigquery/samples/tests/test_list_datasets_by_label.py +++ b/bigquery/samples/tests/test_list_datasets_by_label.py @@ -23,4 +23,4 @@ def test_list_datasets_by_label(capsys, client, dataset_id): dataset = client.update_dataset(dataset, ["labels"]) list_datasets_by_label.list_datasets_by_label(client) out, err = capsys.readouterr() - assert "{}".format(dataset_id) in out + assert dataset_id in out diff --git a/bigquery/samples/tests/test_list_routines.py b/bigquery/samples/tests/test_list_routines.py new file mode 100644 index 000000000000..e249238e1976 --- /dev/null +++ b/bigquery/samples/tests/test_list_routines.py @@ -0,0 +1,24 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from .. import list_routines + + +def test_list_routines(capsys, client, dataset_id, routine_id): + + list_routines.list_routines(client, dataset_id) + out, err = capsys.readouterr() + assert "Routines contained in dataset {}:".format(dataset_id) in out + assert routine_id in out diff --git a/bigquery/samples/tests/test_list_tables.py b/bigquery/samples/tests/test_list_tables.py index ec1621ac7579..61ac04ea26ce 100644 --- a/bigquery/samples/tests/test_list_tables.py +++ b/bigquery/samples/tests/test_list_tables.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. + from .. import list_tables diff --git a/bigquery/samples/tests/test_load_table_dataframe.py b/bigquery/samples/tests/test_load_table_dataframe.py index d553d449a525..2151704d3b25 100644 --- a/bigquery/samples/tests/test_load_table_dataframe.py +++ b/bigquery/samples/tests/test_load_table_dataframe.py @@ -12,16 +12,18 @@ # See the License for the specific language governing permissions and # limitations under the License. + import pytest from .. 
import load_table_dataframe -pytest.importorskip("pandas") -pytest.importorskip("pyarrow") +pandas = pytest.importorskip("pandas") +pyarrow = pytest.importorskip("pyarrow") def test_load_table_dataframe(capsys, client, random_table_id): + table = load_table_dataframe.load_table_dataframe(client, random_table_id) out, _ = capsys.readouterr() assert "Loaded 4 rows and 3 columns" in out diff --git a/bigquery/samples/tests/test_model_samples.py b/bigquery/samples/tests/test_model_samples.py index d7b06a92a3e1..99d838533917 100644 --- a/bigquery/samples/tests/test_model_samples.py +++ b/bigquery/samples/tests/test_model_samples.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. + from .. import delete_model from .. import get_model from .. import list_models diff --git a/bigquery/samples/tests/test_query_external_sheets_permanent_table.py b/bigquery/samples/tests/test_query_external_sheets_permanent_table.py new file mode 100644 index 000000000000..a7b5db09e5af --- /dev/null +++ b/bigquery/samples/tests/test_query_external_sheets_permanent_table.py @@ -0,0 +1,25 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from .. 
import query_external_sheets_permanent_table + + +def test_query_external_sheets_permanent_table(capsys, dataset_id): + + query_external_sheets_permanent_table.query_external_sheets_permanent_table( + dataset_id + ) + out, err = capsys.readouterr() + assert "There are 2 states with names starting with W in the selected range." in out diff --git a/bigquery/samples/tests/test_query_external_sheets_temporary_table.py b/bigquery/samples/tests/test_query_external_sheets_temporary_table.py new file mode 100644 index 000000000000..4856b6a49d2b --- /dev/null +++ b/bigquery/samples/tests/test_query_external_sheets_temporary_table.py @@ -0,0 +1,23 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from .. import query_external_sheets_temporary_table + + +def test_query_external_sheets_temporary_table(capsys): + + query_external_sheets_temporary_table.query_external_sheets_temporary_table() + out, err = capsys.readouterr() + assert "There are 2 states with names starting with W in the selected range." in out diff --git a/bigquery/samples/tests/test_query_script.py b/bigquery/samples/tests/test_query_script.py new file mode 100644 index 000000000000..70bb9df76fd4 --- /dev/null +++ b/bigquery/samples/tests/test_query_script.py @@ -0,0 +1,28 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from .. import query_script + + +def test_query_script(capsys, client): + + query_script.query_script(client) + out, _ = capsys.readouterr() + assert "Script created 2 child jobs." in out + assert ( + "53 of the top 100 names from year 2000 also appear in Shakespeare's works." + in out + ) + assert "produced 53 row(s)" in out + assert "produced 1 row(s)" in out diff --git a/bigquery/samples/tests/test_query_to_arrow.py b/bigquery/samples/tests/test_query_to_arrow.py index f70bd49fe565..2fbed807ece4 100644 --- a/bigquery/samples/tests/test_query_to_arrow.py +++ b/bigquery/samples/tests/test_query_to_arrow.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. + import pyarrow from .. import query_to_arrow @@ -22,7 +23,6 @@ def test_query_to_arrow(capsys, client): arrow_table = query_to_arrow.query_to_arrow(client) out, err = capsys.readouterr() assert "Downloaded 8 rows, 2 columns." in out - arrow_schema = arrow_table.schema assert arrow_schema.names == ["race", "participant"] assert pyarrow.types.is_string(arrow_schema.types[0]) diff --git a/bigquery/samples/tests/test_table_exists.py b/bigquery/samples/tests/test_table_exists.py new file mode 100644 index 000000000000..232d77fbcb60 --- /dev/null +++ b/bigquery/samples/tests/test_table_exists.py @@ -0,0 +1,30 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from google.cloud import bigquery + +from .. import table_exists + + +def test_table_exists(capsys, client, random_table_id): + + table_exists.table_exists(client, random_table_id) + out, err = capsys.readouterr() + assert "Table {} is not found.".format(random_table_id) in out + table = bigquery.Table(random_table_id) + table = client.create_table(table) + table_exists.table_exists(client, random_table_id) + out, err = capsys.readouterr() + assert "Table {} already exists.".format(random_table_id) in out diff --git a/bigquery/samples/tests/test_table_insert_rows.py b/bigquery/samples/tests/test_table_insert_rows.py new file mode 100644 index 000000000000..95d119dbdc93 --- /dev/null +++ b/bigquery/samples/tests/test_table_insert_rows.py @@ -0,0 +1,33 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from google.cloud import bigquery + +from .. 
import table_insert_rows + + +def test_table_insert_rows(capsys, client, random_table_id): + + schema = [ + bigquery.SchemaField("full_name", "STRING", mode="REQUIRED"), + bigquery.SchemaField("age", "INTEGER", mode="REQUIRED"), + ] + + table = bigquery.Table(random_table_id, schema=schema) + table = client.create_table(table) + + table_insert_rows.table_insert_rows(client, random_table_id) + out, err = capsys.readouterr() + assert "New rows have been added." in out diff --git a/bigquery/samples/tests/test_table_insert_rows_explicit_none_insert_ids.py b/bigquery/samples/tests/test_table_insert_rows_explicit_none_insert_ids.py new file mode 100644 index 000000000000..6a59609baacf --- /dev/null +++ b/bigquery/samples/tests/test_table_insert_rows_explicit_none_insert_ids.py @@ -0,0 +1,33 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from google.cloud import bigquery + +from .. import table_insert_rows_explicit_none_insert_ids as mut + + +def test_table_insert_rows_explicit_none_insert_ids(capsys, client, random_table_id): + + schema = [ + bigquery.SchemaField("full_name", "STRING", mode="REQUIRED"), + bigquery.SchemaField("age", "INTEGER", mode="REQUIRED"), + ] + + table = bigquery.Table(random_table_id, schema=schema) + table = client.create_table(table) + + mut.table_insert_rows_explicit_none_insert_ids(client, random_table_id) + out, err = capsys.readouterr() + assert "New rows have been added." 
in out diff --git a/bigquery/samples/tests/test_update_dataset_access.py b/bigquery/samples/tests/test_update_dataset_access.py index ae33dbfe4a4c..679b700731e3 100644 --- a/bigquery/samples/tests/test_update_dataset_access.py +++ b/bigquery/samples/tests/test_update_dataset_access.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. + from .. import update_dataset_access diff --git a/bigquery/samples/tests/test_update_dataset_default_partition_expiration.py b/bigquery/samples/tests/test_update_dataset_default_partition_expiration.py new file mode 100644 index 000000000000..55fa4b0d96fb --- /dev/null +++ b/bigquery/samples/tests/test_update_dataset_default_partition_expiration.py @@ -0,0 +1,31 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from .. 
import update_dataset_default_partition_expiration + + +def test_update_dataset_default_partition_expiration(capsys, client, dataset_id): + + ninety_days_ms = 90 * 24 * 60 * 60 * 1000 # in milliseconds + + update_dataset_default_partition_expiration.update_dataset_default_partition_expiration( + client, dataset_id + ) + out, _ = capsys.readouterr() + assert ( + "Updated dataset {} with new default partition expiration {}".format( + dataset_id, ninety_days_ms + ) + in out + ) diff --git a/bigquery/samples/tests/test_update_dataset_default_table_expiration.py b/bigquery/samples/tests/test_update_dataset_default_table_expiration.py index 46e9654209ed..a97de11a2f1a 100644 --- a/bigquery/samples/tests/test_update_dataset_default_table_expiration.py +++ b/bigquery/samples/tests/test_update_dataset_default_table_expiration.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. + from .. import update_dataset_default_table_expiration diff --git a/bigquery/samples/tests/test_update_dataset_description.py b/bigquery/samples/tests/test_update_dataset_description.py index c6f8889f50da..63826077b976 100644 --- a/bigquery/samples/tests/test_update_dataset_description.py +++ b/bigquery/samples/tests/test_update_dataset_description.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. + from .. 
import update_dataset_description diff --git a/translate/tests/unit/__init__.py b/bigquery/samples/tests/test_update_routine.py similarity index 65% rename from translate/tests/unit/__init__.py rename to bigquery/samples/tests/test_update_routine.py index df379f1e9d88..8adfab32e032 100644 --- a/translate/tests/unit/__init__.py +++ b/bigquery/samples/tests/test_update_routine.py @@ -1,13 +1,22 @@ -# Copyright 2016 Google LLC +# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. + + +from .. import update_routine + + +def test_update_routine(client, routine_id): + + routine = update_routine.update_routine(client, routine_id) + assert routine.body == "x * 4" diff --git a/bigquery/samples/tests/test_update_table_require_partition_filter.py b/bigquery/samples/tests/test_update_table_require_partition_filter.py new file mode 100644 index 000000000000..1cbd2b2279b2 --- /dev/null +++ b/bigquery/samples/tests/test_update_table_require_partition_filter.py @@ -0,0 +1,33 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from google.cloud import bigquery +from .. import update_table_require_partition_filter + + +def test_update_table_require_partition_filter(capsys, client, random_table_id): + # Make a partitioned table. + schema = [bigquery.SchemaField("transaction_timestamp", "TIMESTAMP")] + table = bigquery.Table(random_table_id, schema) + table.time_partitioning = bigquery.TimePartitioning(field="transaction_timestamp") + table = client.create_table(table) + + update_table_require_partition_filter.update_table_require_partition_filter( + client, random_table_id + ) + out, _ = capsys.readouterr() + assert ( + "Updated table '{}' with require_partition_filter=True".format(random_table_id) + in out + ) diff --git a/bigquery/samples/update_dataset_access.py b/bigquery/samples/update_dataset_access.py index aa316a38dff9..134cf1b940cf 100644 --- a/bigquery/samples/update_dataset_access.py +++ b/bigquery/samples/update_dataset_access.py @@ -24,7 +24,7 @@ def update_dataset_access(client, dataset_id): # TODO(developer): Set dataset_id to the ID of the dataset to fetch. # dataset_id = 'your-project.your_dataset' - dataset = client.get_dataset(dataset_id) + dataset = client.get_dataset(dataset_id) # Make an API request. entry = bigquery.AccessEntry( role="READER", @@ -36,7 +36,7 @@ def update_dataset_access(client, dataset_id): entries.append(entry) dataset.access_entries = entries - dataset = client.update_dataset(dataset, ["access_entries"]) # API request + dataset = client.update_dataset(dataset, ["access_entries"]) # Make an API request. 
full_dataset_id = "{}.{}".format(dataset.project, dataset.dataset_id) print( diff --git a/bigquery/samples/update_dataset_default_partition_expiration.py b/bigquery/samples/update_dataset_default_partition_expiration.py new file mode 100644 index 000000000000..502d52ff199b --- /dev/null +++ b/bigquery/samples/update_dataset_default_partition_expiration.py @@ -0,0 +1,43 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def update_dataset_default_partition_expiration(client, dataset_id): + + # [START bigquery_update_dataset_partition_expiration] + # TODO(developer): Import the client library. + # from google.cloud import bigquery + + # TODO(developer): Construct a BigQuery client object. + # client = bigquery.Client() + + # TODO(developer): Set dataset_id to the ID of the dataset to fetch. + # dataset_id = 'your-project.your_dataset' + + dataset = client.get_dataset(dataset_id) # Make an API request. + + # Set the default partition expiration (applies to new tables, only) in + # milliseconds. This example sets the default expiration to 90 days. + dataset.default_partition_expiration_ms = 90 * 24 * 60 * 60 * 1000 + + dataset = client.update_dataset( + dataset, ["default_partition_expiration_ms"] + ) # Make an API request. 
+ + print( + "Updated dataset {}.{} with new default partition expiration {}".format( + dataset.project, dataset.dataset_id, dataset.default_partition_expiration_ms + ) + ) + # [END bigquery_update_dataset_partition_expiration] diff --git a/bigquery/samples/update_dataset_default_table_expiration.py b/bigquery/samples/update_dataset_default_table_expiration.py index 4534bb2011eb..8de354b1f21b 100644 --- a/bigquery/samples/update_dataset_default_table_expiration.py +++ b/bigquery/samples/update_dataset_default_table_expiration.py @@ -25,12 +25,12 @@ def update_dataset_default_table_expiration(client, dataset_id): # TODO(developer): Set dataset_id to the ID of the dataset to fetch. # dataset_id = 'your-project.your_dataset' - dataset = client.get_dataset(dataset_id) - dataset.default_table_expiration_ms = 24 * 60 * 60 * 1000 # in milliseconds + dataset = client.get_dataset(dataset_id) # Make an API request. + dataset.default_table_expiration_ms = 24 * 60 * 60 * 1000 # In milliseconds. dataset = client.update_dataset( dataset, ["default_table_expiration_ms"] - ) # API request + ) # Make an API request. full_dataset_id = "{}.{}".format(dataset.project, dataset.dataset_id) print( diff --git a/bigquery/samples/update_dataset_description.py b/bigquery/samples/update_dataset_description.py index f3afb7fa68ce..08eed8da2b64 100644 --- a/bigquery/samples/update_dataset_description.py +++ b/bigquery/samples/update_dataset_description.py @@ -25,9 +25,9 @@ def update_dataset_description(client, dataset_id): # TODO(developer): Set dataset_id to the ID of the dataset to fetch. # dataset_id = 'your-project.your_dataset' - dataset = client.get_dataset(dataset_id) + dataset = client.get_dataset(dataset_id) # Make an API request. dataset.description = "Updated description." - dataset = client.update_dataset(dataset, ["description"]) + dataset = client.update_dataset(dataset, ["description"]) # Make an API request. 
full_dataset_id = "{}.{}".format(dataset.project, dataset.dataset_id) print( diff --git a/bigquery/samples/update_model.py b/bigquery/samples/update_model.py index 5df4ada886ed..7583c410e1ef 100644 --- a/bigquery/samples/update_model.py +++ b/bigquery/samples/update_model.py @@ -26,9 +26,9 @@ def update_model(client, model_id): # TODO(developer): Set model_id to the ID of the model to fetch. # model_id = 'your-project.your_dataset.your_model' - model = client.get_model(model_id) + model = client.get_model(model_id) # Make an API request. model.description = "This model was modified from a Python program." - model = client.update_model(model, ["description"]) + model = client.update_model(model, ["description"]) # Make an API request. full_model_id = "{}.{}.{}".format(model.project, model.dataset_id, model.model_id) print( diff --git a/bigquery/samples/update_routine.py b/bigquery/samples/update_routine.py index 4d491d42e168..4489d68f7ee4 100644 --- a/bigquery/samples/update_routine.py +++ b/bigquery/samples/update_routine.py @@ -33,13 +33,14 @@ def update_routine(client, routine_id): routine, [ "body", - # Due to a limitation of the API, all fields are required, not just + # Due to a limitation of the API, + # all fields are required, not just # those that have been updated. "arguments", "language", "type_", "return_type", ], - ) + ) # Make an API request. # [END bigquery_update_routine] return routine diff --git a/bigquery/samples/update_table_require_partition_filter.py b/bigquery/samples/update_table_require_partition_filter.py new file mode 100644 index 000000000000..4c6be2d2cedc --- /dev/null +++ b/bigquery/samples/update_table_require_partition_filter.py @@ -0,0 +1,41 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def update_table_require_partition_filter(client, table_id): + + # [START bigquery_update_table_require_partition_filter] + # TODO(developer): Import the client library. + # from google.cloud import bigquery + + # TODO(developer): Construct a BigQuery client object. + # client = bigquery.Client() + + # TODO(developer): Set table_id to the ID of the model to fetch. + # table_id = 'your-project.your_dataset.your_table' + + table = client.get_table(table_id) # Make an API request. + table.require_partition_filter = True + table = client.update_table(table, ["require_partition_filter"]) + + # View table properties + print( + "Updated table '{}.{}.{}' with require_partition_filter={}.".format( + table.project, + table.dataset_id, + table.table_id, + table.require_partition_filter, + ) + ) + # [END bigquery_update_table_require_partition_filter] diff --git a/bigquery/setup.py b/bigquery/setup.py index 42f59516fc13..8331d2c07114 100644 --- a/bigquery/setup.py +++ b/bigquery/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-bigquery" description = "Google BigQuery API client library" -version = "1.19.0" +version = "1.21.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' @@ -31,7 +31,7 @@ dependencies = [ 'enum34; python_version < "3.4"', "google-cloud-core >= 1.0.3, < 2.0dev", - "google-resumable-media >= 0.3.1", + "google-resumable-media >= 0.3.1, != 0.4.0, < 0.6.0dev", "protobuf >= 3.6.0", ] extras = { diff --git a/bigquery/synth.metadata b/bigquery/synth.metadata index b5cce0083f8e..863d7b1ad9e6 100644 --- 
a/bigquery/synth.metadata +++ b/bigquery/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-08-06T12:13:25.893023Z", + "updateTime": "2019-10-29T12:13:17.119821Z", "sources": [ { "generator": { "name": "artman", - "version": "0.32.1", - "dockerImage": "googleapis/artman@sha256:a684d40ba9a4e15946f5f2ca6b4bd9fe301192f522e9de4fff622118775f309b" + "version": "0.40.3", + "dockerImage": "googleapis/artman@sha256:c805f50525f5f557886c94ab76f56eaa09cb1da58c3ee95111fd34259376621a" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "e699b0cba64ffddfae39633417180f1f65875896", - "internalRef": "261759677" + "sha": "532773acbed8d09451dafb3d403ab1823e6a6e1e", + "internalRef": "277177415" } } ], diff --git a/bigquery/synth.py b/bigquery/synth.py index a8370fd4e96a..a20426d3910f 100644 --- a/bigquery/synth.py +++ b/bigquery/synth.py @@ -35,6 +35,7 @@ library / "google/cloud/bigquery_v2/gapic/enums.py", library / "google/cloud/bigquery_v2/types.py", library / "google/cloud/bigquery_v2/proto/location*", + library / "google/cloud/bigquery_v2/proto/encryption_config*", library / "google/cloud/bigquery_v2/proto/model*", library / "google/cloud/bigquery_v2/proto/standard_sql*", ], @@ -46,6 +47,11 @@ '"""Attributes:', '"""Protocol buffer.\n\n Attributes:', ) +s.replace( + "google/cloud/bigquery_v2/proto/encryption_config_pb2.py", + '"""Attributes:', + '"""Encryption configuration.\n\n Attributes:', +) # Remove non-ascii characters from docstrings for Python 2.7. # Format quoted strings as plain text. 
diff --git a/bigquery/tests/system.py b/bigquery/tests/system.py index 3593e1ecb609..4816962a70d6 100644 --- a/bigquery/tests/system.py +++ b/bigquery/tests/system.py @@ -1951,6 +1951,73 @@ def test_query_results_to_dataframe_w_bqstorage(self): if not row[col] is None: self.assertIsInstance(row[col], exp_datatypes[col]) + @unittest.skipIf(pandas is None, "Requires `pandas`") + def test_insert_rows_from_dataframe(self): + SF = bigquery.SchemaField + schema = [ + SF("float_col", "FLOAT", mode="REQUIRED"), + SF("int_col", "INTEGER", mode="REQUIRED"), + SF("bool_col", "BOOLEAN", mode="REQUIRED"), + SF("string_col", "STRING", mode="NULLABLE"), + ] + + dataframe = pandas.DataFrame( + [ + { + "float_col": 1.11, + "bool_col": True, + "string_col": "my string", + "int_col": 10, + }, + { + "float_col": 2.22, + "bool_col": False, + "string_col": "another string", + "int_col": 20, + }, + { + "float_col": 3.33, + "bool_col": False, + "string_col": "another string", + "int_col": 30, + }, + { + "float_col": 4.44, + "bool_col": True, + "string_col": "another string", + "int_col": 40, + }, + { + "float_col": 5.55, + "bool_col": False, + "string_col": "another string", + "int_col": 50, + }, + ] + ) + + table_id = "test_table" + dataset = self.temp_dataset(_make_dataset_id("issue_7553")) + table_arg = Table(dataset.table(table_id), schema=schema) + table = retry_403(Config.CLIENT.create_table)(table_arg) + self.to_delete.insert(0, table) + + Config.CLIENT.insert_rows_from_dataframe(table, dataframe, chunk_size=3) + + retry = RetryResult(_has_rows, max_tries=8) + rows = retry(self._fetch_single_page)(table) + + sorted_rows = sorted(rows, key=operator.attrgetter("int_col")) + row_tuples = [r.values() for r in sorted_rows] + expected = [tuple(data_row) for data_row in dataframe.itertuples(index=False)] + + assert len(row_tuples) == len(expected) + + for row, expected_row in zip(row_tuples, expected): + six.assertCountEqual( + self, row, expected_row + ) # column order does not matter + 
def test_insert_rows_nested_nested(self): # See #2951 SF = bigquery.SchemaField @@ -2315,6 +2382,27 @@ def test_list_rows_page_size(self): page = next(pages) self.assertEqual(page.num_items, num_last_page) + @unittest.skipIf(pandas is None, "Requires `pandas`") + @unittest.skipIf( + bigquery_storage_v1beta1 is None, "Requires `google-cloud-bigquery-storage`" + ) + def test_list_rows_max_results_w_bqstorage(self): + table_ref = DatasetReference("bigquery-public-data", "utility_us").table( + "country_code_iso" + ) + bqstorage_client = bigquery_storage_v1beta1.BigQueryStorageClient( + credentials=Config.CLIENT._credentials + ) + + row_iterator = Config.CLIENT.list_rows( + table_ref, + selected_fields=[bigquery.SchemaField("country_name", "STRING")], + max_results=100, + ) + dataframe = row_iterator.to_dataframe(bqstorage_client=bqstorage_client) + + self.assertEqual(len(dataframe.index), 100) + def temp_dataset(self, dataset_id, location=None): dataset = Dataset(Config.CLIENT.dataset(dataset_id)) if location: diff --git a/bigquery/tests/unit/model/test_model.py b/bigquery/tests/unit/model/test_model.py index b6d9756e15fe..bbb93ef9e897 100644 --- a/bigquery/tests/unit/model/test_model.py +++ b/bigquery/tests/unit/model/test_model.py @@ -21,6 +21,8 @@ import google.cloud._helpers from google.cloud.bigquery_v2.gapic import enums +KMS_KEY_NAME = "projects/1/locations/us/keyRings/1/cryptoKeys/1" + @pytest.fixture def target_class(): @@ -99,6 +101,7 @@ def test_from_api_repr(target_class): }, ], "featureColumns": [], + "encryptionConfiguration": {"kmsKeyName": KMS_KEY_NAME}, } got = target_class.from_api_repr(resource) @@ -116,6 +119,7 @@ def test_from_api_repr(target_class): assert got.friendly_name == u"A friendly name." 
assert got.model_type == enums.Model.ModelType.LOGISTIC_REGRESSION assert got.labels == {"greeting": u"こんにちは"} + assert got.encryption_configuration.kms_key_name == KMS_KEY_NAME assert got.training_runs[0].training_options.initial_learn_rate == 1.0 assert ( got.training_runs[0] @@ -160,6 +164,7 @@ def test_from_api_repr_w_minimal_resource(target_class): assert got.friendly_name is None assert got.model_type == enums.Model.ModelType.MODEL_TYPE_UNSPECIFIED assert got.labels == {} + assert got.encryption_configuration is None assert len(got.training_runs) == 0 assert len(got.feature_columns) == 0 assert len(got.label_columns) == 0 @@ -229,6 +234,17 @@ def test_from_api_repr_w_unknown_fields(target_class): ["labels"], {"labels": {"a-label": "a-value"}}, ), + ( + { + "friendlyName": "hello", + "description": "world", + "expirationTime": None, + "labels": {"a-label": "a-value"}, + "encryptionConfiguration": {"kmsKeyName": KMS_KEY_NAME}, + }, + ["encryptionConfiguration"], + {"encryptionConfiguration": {"kmsKeyName": KMS_KEY_NAME}}, + ), ], ) def test_build_resource(object_under_test, resource, filter_fields, expected): @@ -283,6 +299,18 @@ def test_replace_labels(object_under_test): assert object_under_test.labels == {} +def test_set_encryption_configuration(object_under_test): + from google.cloud.bigquery.encryption_configuration import EncryptionConfiguration + + assert not object_under_test.encryption_configuration + object_under_test.encryption_configuration = EncryptionConfiguration( + kms_key_name=KMS_KEY_NAME + ) + assert object_under_test.encryption_configuration.kms_key_name == KMS_KEY_NAME + object_under_test.encryption_configuration = None + assert not object_under_test.encryption_configuration + + def test_repr(target_class): model = target_class("my-proj.my_dset.my_model") got = repr(model) diff --git a/bigquery/tests/unit/test__helpers.py b/bigquery/tests/unit/test__helpers.py index 3884695d83af..6d92b4de73ba 100644 --- 
a/bigquery/tests/unit/test__helpers.py +++ b/bigquery/tests/unit/test__helpers.py @@ -17,6 +17,8 @@ import decimal import unittest +import mock + class Test_not_null(unittest.TestCase): def _call_fut(self, value, field): @@ -412,7 +414,8 @@ class Test_row_tuple_from_json(unittest.TestCase): def _call_fut(self, row, schema): from google.cloud.bigquery._helpers import _row_tuple_from_json - return _row_tuple_from_json(row, schema) + with _field_isinstance_patcher(): + return _row_tuple_from_json(row, schema) def test_w_single_scalar_column(self): # SELECT 1 AS col @@ -529,7 +532,8 @@ class Test_rows_from_json(unittest.TestCase): def _call_fut(self, rows, schema): from google.cloud.bigquery._helpers import _rows_from_json - return _rows_from_json(rows, schema) + with _field_isinstance_patcher(): + return _rows_from_json(rows, schema) def test_w_record_subfield(self): from google.cloud.bigquery.table import Row @@ -1023,3 +1027,23 @@ def __init__(self, mode, name="unknown", field_type="UNKNOWN", fields=()): self.name = name self.field_type = field_type self.fields = fields + + +def _field_isinstance_patcher(): + """A patcher thank makes _Field instances seem like SchemaField instances. 
+ """ + from google.cloud.bigquery.schema import SchemaField + + def fake_isinstance(instance, target_class): + if instance.__class__.__name__ != "_Field": + return isinstance(instance, target_class) # pragma: NO COVER + + # pretend that _Field() instances are actually instances of SchemaField + return target_class is SchemaField or ( + isinstance(target_class, tuple) and SchemaField in target_class + ) + + patcher = mock.patch( + "google.cloud.bigquery.schema.isinstance", side_effect=fake_isinstance + ) + return patcher diff --git a/bigquery/tests/unit/test__pandas_helpers.py b/bigquery/tests/unit/test__pandas_helpers.py index b539abe9a89a..a6ccec2e094f 100644 --- a/bigquery/tests/unit/test__pandas_helpers.py +++ b/bigquery/tests/unit/test__pandas_helpers.py @@ -16,6 +16,7 @@ import datetime import decimal import functools +import operator import warnings import mock @@ -34,6 +35,7 @@ import pytest import pytz +from google import api_core from google.cloud.bigquery import schema @@ -618,7 +620,7 @@ def test_list_columns_and_indexes_without_named_index(module_under_test): @pytest.mark.skipif(pandas is None, reason="Requires `pandas`") def test_list_columns_and_indexes_with_named_index_same_as_column_name( - module_under_test + module_under_test, ): df_data = collections.OrderedDict( [ @@ -700,6 +702,32 @@ def test_list_columns_and_indexes_with_multiindex(module_under_test): assert columns_and_indexes == expected +@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") +def test_dataframe_to_bq_schema_dict_sequence(module_under_test): + df_data = collections.OrderedDict( + [ + ("str_column", [u"hello", u"world"]), + ("int_column", [42, 8]), + ("bool_column", [True, False]), + ] + ) + dataframe = pandas.DataFrame(df_data) + + dict_schema = [ + {"name": "str_column", "type": "STRING", "mode": "NULLABLE"}, + {"name": "bool_column", "type": "BOOL", "mode": "REQUIRED"}, + ] + + returned_schema = module_under_test.dataframe_to_bq_schema(dataframe, dict_schema) + + 
expected_schema = ( + schema.SchemaField("str_column", "STRING", "NULLABLE"), + schema.SchemaField("int_column", "INTEGER", "NULLABLE"), + schema.SchemaField("bool_column", "BOOL", "REQUIRED"), + ) + assert returned_schema == expected_schema + + @pytest.mark.skipif(pandas is None, reason="Requires `pandas`") @pytest.mark.skipif(pyarrow is None, reason="Requires `pyarrow`") def test_dataframe_to_arrow_with_multiindex(module_under_test): @@ -855,6 +883,28 @@ def test_dataframe_to_arrow_with_unknown_type(module_under_test): assert arrow_schema[3].name == "field03" +@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") +@pytest.mark.skipif(pyarrow is None, reason="Requires `pyarrow`") +def test_dataframe_to_arrow_dict_sequence_schema(module_under_test): + dict_schema = [ + {"name": "field01", "type": "STRING", "mode": "REQUIRED"}, + {"name": "field02", "type": "BOOL", "mode": "NULLABLE"}, + ] + + dataframe = pandas.DataFrame( + {"field01": [u"hello", u"world"], "field02": [True, False]} + ) + + arrow_table = module_under_test.dataframe_to_arrow(dataframe, dict_schema) + arrow_schema = arrow_table.schema + + expected_fields = [ + pyarrow.field("field01", "string", nullable=False), + pyarrow.field("field02", "bool", nullable=True), + ] + assert list(arrow_schema) == expected_fields + + @pytest.mark.skipif(pandas is None, reason="Requires `pandas`") def test_dataframe_to_parquet_without_pyarrow(module_under_test, monkeypatch): monkeypatch.setattr(module_under_test, "pyarrow", None) @@ -905,3 +955,342 @@ def test_dataframe_to_parquet_compression_method(module_under_test): call_args = fake_write_table.call_args assert call_args is not None assert call_args.kwargs.get("compression") == "ZSTD" + + +@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") +def test_dataframe_to_bq_schema_fallback_needed_wo_pyarrow(module_under_test): + dataframe = pandas.DataFrame( + data=[ + {"id": 10, "status": u"FOO", "execution_date": datetime.date(2019, 5, 10)}, + {"id": 
20, "status": u"BAR", "created_at": datetime.date(2018, 9, 12)}, + ] + ) + + no_pyarrow_patch = mock.patch(module_under_test.__name__ + ".pyarrow", None) + + with no_pyarrow_patch, warnings.catch_warnings(record=True) as warned: + detected_schema = module_under_test.dataframe_to_bq_schema( + dataframe, bq_schema=[] + ) + + assert detected_schema is None + + # a warning should also be issued + expected_warnings = [ + warning for warning in warned if "could not determine" in str(warning).lower() + ] + assert len(expected_warnings) == 1 + msg = str(expected_warnings[0]) + assert "execution_date" in msg and "created_at" in msg + + +@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") +@pytest.mark.skipif(pyarrow is None, reason="Requires `pyarrow`") +def test_dataframe_to_bq_schema_fallback_needed_w_pyarrow(module_under_test): + dataframe = pandas.DataFrame( + data=[ + {"id": 10, "status": u"FOO", "created_at": datetime.date(2019, 5, 10)}, + {"id": 20, "status": u"BAR", "created_at": datetime.date(2018, 9, 12)}, + ] + ) + + with warnings.catch_warnings(record=True) as warned: + detected_schema = module_under_test.dataframe_to_bq_schema( + dataframe, bq_schema=[] + ) + + expected_schema = ( + schema.SchemaField("id", "INTEGER", mode="NULLABLE"), + schema.SchemaField("status", "STRING", mode="NULLABLE"), + schema.SchemaField("created_at", "DATE", mode="NULLABLE"), + ) + by_name = operator.attrgetter("name") + assert sorted(detected_schema, key=by_name) == sorted(expected_schema, key=by_name) + + # there should be no relevant warnings + unwanted_warnings = [ + warning for warning in warned if "could not determine" in str(warning).lower() + ] + assert not unwanted_warnings + + +@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") +@pytest.mark.skipif(pyarrow is None, reason="Requires `pyarrow`") +def test_dataframe_to_bq_schema_pyarrow_fallback_fails(module_under_test): + dataframe = pandas.DataFrame( + data=[ + {"struct_field": {"one": 2}, "status": 
u"FOO"}, + {"struct_field": {"two": u"222"}, "status": u"BAR"}, + ] + ) + + with warnings.catch_warnings(record=True) as warned: + detected_schema = module_under_test.dataframe_to_bq_schema( + dataframe, bq_schema=[] + ) + + assert detected_schema is None + + # a warning should also be issued + expected_warnings = [ + warning for warning in warned if "could not determine" in str(warning).lower() + ] + assert len(expected_warnings) == 1 + assert "struct_field" in str(expected_warnings[0]) + + +@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") +@pytest.mark.skipif(pyarrow is None, reason="Requires `pyarrow`") +def test_augment_schema_type_detection_succeeds(module_under_test): + dataframe = pandas.DataFrame( + data=[ + { + "bool_field": False, + "int_field": 123, + "float_field": 3.141592, + "time_field": datetime.time(17, 59, 47), + "timestamp_field": datetime.datetime(2005, 5, 31, 14, 25, 55), + "date_field": datetime.date(2005, 5, 31), + "bytes_field": b"some bytes", + "string_field": u"some characters", + "numeric_field": decimal.Decimal("123.456"), + } + ] + ) + + # NOTE: In Pandas dataframe, the dtype of Python's datetime instances is + # set to "datetime64[ns]", and pyarrow converts that to pyarrow.TimestampArray. + # We thus cannot expect to get a DATETIME date when converting back to the + # BigQuery type. 
+ + current_schema = ( + schema.SchemaField("bool_field", field_type=None, mode="NULLABLE"), + schema.SchemaField("int_field", field_type=None, mode="NULLABLE"), + schema.SchemaField("float_field", field_type=None, mode="NULLABLE"), + schema.SchemaField("time_field", field_type=None, mode="NULLABLE"), + schema.SchemaField("timestamp_field", field_type=None, mode="NULLABLE"), + schema.SchemaField("date_field", field_type=None, mode="NULLABLE"), + schema.SchemaField("bytes_field", field_type=None, mode="NULLABLE"), + schema.SchemaField("string_field", field_type=None, mode="NULLABLE"), + schema.SchemaField("numeric_field", field_type=None, mode="NULLABLE"), + ) + + with warnings.catch_warnings(record=True) as warned: + augmented_schema = module_under_test.augment_schema(dataframe, current_schema) + + # there should be no relevant warnings + unwanted_warnings = [ + warning for warning in warned if "Pyarrow could not" in str(warning) + ] + assert not unwanted_warnings + + # the augmented schema must match the expected + expected_schema = ( + schema.SchemaField("bool_field", field_type="BOOL", mode="NULLABLE"), + schema.SchemaField("int_field", field_type="INT64", mode="NULLABLE"), + schema.SchemaField("float_field", field_type="FLOAT64", mode="NULLABLE"), + schema.SchemaField("time_field", field_type="TIME", mode="NULLABLE"), + schema.SchemaField("timestamp_field", field_type="TIMESTAMP", mode="NULLABLE"), + schema.SchemaField("date_field", field_type="DATE", mode="NULLABLE"), + schema.SchemaField("bytes_field", field_type="BYTES", mode="NULLABLE"), + schema.SchemaField("string_field", field_type="STRING", mode="NULLABLE"), + schema.SchemaField("numeric_field", field_type="NUMERIC", mode="NULLABLE"), + ) + by_name = operator.attrgetter("name") + assert sorted(augmented_schema, key=by_name) == sorted(expected_schema, key=by_name) + + +@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") +@pytest.mark.skipif(pyarrow is None, reason="Requires `pyarrow`") +def 
test_augment_schema_type_detection_fails(module_under_test): + dataframe = pandas.DataFrame( + data=[ + { + "status": u"FOO", + "struct_field": {"one": 1}, + "struct_field_2": {"foo": u"123"}, + }, + { + "status": u"BAR", + "struct_field": {"two": u"111"}, + "struct_field_2": {"bar": 27}, + }, + ] + ) + current_schema = [ + schema.SchemaField("status", field_type="STRING", mode="NULLABLE"), + schema.SchemaField("struct_field", field_type=None, mode="NULLABLE"), + schema.SchemaField("struct_field_2", field_type=None, mode="NULLABLE"), + ] + + with warnings.catch_warnings(record=True) as warned: + augmented_schema = module_under_test.augment_schema(dataframe, current_schema) + + assert augmented_schema is None + + expected_warnings = [ + warning for warning in warned if "could not determine" in str(warning) + ] + assert len(expected_warnings) == 1 + warning_msg = str(expected_warnings[0]) + assert "pyarrow" in warning_msg.lower() + assert "struct_field" in warning_msg and "struct_field_2" in warning_msg + + +@pytest.mark.skipif(pyarrow is None, reason="Requires `pyarrow`") +def test_dataframe_to_parquet_dict_sequence_schema(module_under_test): + dict_schema = [ + {"name": "field01", "type": "STRING", "mode": "REQUIRED"}, + {"name": "field02", "type": "BOOL", "mode": "NULLABLE"}, + ] + + dataframe = pandas.DataFrame( + {"field01": [u"hello", u"world"], "field02": [True, False]} + ) + + write_table_patch = mock.patch.object( + module_under_test.pyarrow.parquet, "write_table", autospec=True + ) + to_arrow_patch = mock.patch.object( + module_under_test, "dataframe_to_arrow", autospec=True + ) + + with write_table_patch, to_arrow_patch as fake_to_arrow: + module_under_test.dataframe_to_parquet(dataframe, dict_schema, None) + + expected_schema_arg = [ + schema.SchemaField("field01", "STRING", mode="REQUIRED"), + schema.SchemaField("field02", "BOOL", mode="NULLABLE"), + ] + schema_arg = fake_to_arrow.call_args.args[1] + assert schema_arg == expected_schema_arg + + 
+@pytest.mark.skipif(pyarrow is None, reason="Requires `pyarrow`") +def test_download_arrow_tabledata_list_unknown_field_type(module_under_test): + fake_page = api_core.page_iterator.Page( + parent=mock.Mock(), + items=[{"page_data": "foo"}], + item_to_value=api_core.page_iterator._item_to_value_identity, + ) + fake_page._columns = [[1, 10, 100], [2.2, 22.22, 222.222]] + pages = [fake_page] + + bq_schema = [ + schema.SchemaField("population_size", "INTEGER"), + schema.SchemaField("alien_field", "ALIEN_FLOAT_TYPE"), + ] + + results_gen = module_under_test.download_arrow_tabledata_list(pages, bq_schema) + + with warnings.catch_warnings(record=True) as warned: + result = next(results_gen) + + unwanted_warnings = [ + warning + for warning in warned + if "please pass schema= explicitly" in str(warning).lower() + ] + assert not unwanted_warnings + + assert len(result.columns) == 2 + col = result.columns[0] + assert type(col) is pyarrow.lib.Int64Array + assert list(col) == [1, 10, 100] + col = result.columns[1] + assert type(col) is pyarrow.lib.DoubleArray + assert list(col) == [2.2, 22.22, 222.222] + + +@pytest.mark.skipif(pyarrow is None, reason="Requires `pyarrow`") +def test_download_arrow_tabledata_list_known_field_type(module_under_test): + fake_page = api_core.page_iterator.Page( + parent=mock.Mock(), + items=[{"page_data": "foo"}], + item_to_value=api_core.page_iterator._item_to_value_identity, + ) + fake_page._columns = [[1, 10, 100], ["2.2", "22.22", "222.222"]] + pages = [fake_page] + + bq_schema = [ + schema.SchemaField("population_size", "INTEGER"), + schema.SchemaField("non_alien_field", "STRING"), + ] + + results_gen = module_under_test.download_arrow_tabledata_list(pages, bq_schema) + with warnings.catch_warnings(record=True) as warned: + result = next(results_gen) + + unwanted_warnings = [ + warning + for warning in warned + if "please pass schema= explicitly" in str(warning).lower() + ] + assert not unwanted_warnings + + assert len(result.columns) == 2 + 
col = result.columns[0] + assert type(col) is pyarrow.lib.Int64Array + assert list(col) == [1, 10, 100] + col = result.columns[1] + assert type(col) is pyarrow.lib.StringArray + assert list(col) == ["2.2", "22.22", "222.222"] + + +@pytest.mark.skipif(pyarrow is None, reason="Requires `pyarrow`") +def test_download_arrow_tabledata_list_dict_sequence_schema(module_under_test): + fake_page = api_core.page_iterator.Page( + parent=mock.Mock(), + items=[{"page_data": "foo"}], + item_to_value=api_core.page_iterator._item_to_value_identity, + ) + fake_page._columns = [[1, 10, 100], ["2.2", "22.22", "222.222"]] + pages = [fake_page] + + dict_schema = [ + {"name": "population_size", "type": "INTEGER", "mode": "NULLABLE"}, + {"name": "non_alien_field", "type": "STRING", "mode": "NULLABLE"}, + ] + + results_gen = module_under_test.download_arrow_tabledata_list(pages, dict_schema) + result = next(results_gen) + + assert len(result.columns) == 2 + col = result.columns[0] + assert type(col) is pyarrow.lib.Int64Array + assert list(col) == [1, 10, 100] + col = result.columns[1] + assert type(col) is pyarrow.lib.StringArray + assert list(col) == ["2.2", "22.22", "222.222"] + + +@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") +@pytest.mark.skipif(pyarrow is None, reason="Requires `pyarrow`") +def test_download_dataframe_tabledata_list_dict_sequence_schema(module_under_test): + fake_page = api_core.page_iterator.Page( + parent=mock.Mock(), + items=[{"page_data": "foo"}], + item_to_value=api_core.page_iterator._item_to_value_identity, + ) + fake_page._columns = [[1, 10, 100], ["2.2", "22.22", "222.222"]] + pages = [fake_page] + + dict_schema = [ + {"name": "population_size", "type": "INTEGER", "mode": "NULLABLE"}, + {"name": "non_alien_field", "type": "STRING", "mode": "NULLABLE"}, + ] + + results_gen = module_under_test.download_dataframe_tabledata_list( + pages, dict_schema, dtypes={} + ) + result = next(results_gen) + + expected_result = pandas.DataFrame( + 
collections.OrderedDict( + [ + ("population_size", [1, 10, 100]), + ("non_alien_field", ["2.2", "22.22", "222.222"]), + ] + ) + ) + assert result.equals(expected_result) diff --git a/bigquery/tests/unit/test_client.py b/bigquery/tests/unit/test_client.py index da3cee11e5d0..e6ed4d1c8072 100644 --- a/bigquery/tests/unit/test_client.py +++ b/bigquery/tests/unit/test_client.py @@ -81,7 +81,7 @@ class TestClient(unittest.TestCase): TABLE_ID = "TABLE_ID" MODEL_ID = "MODEL_ID" TABLE_REF = DatasetReference(PROJECT, DS_ID).table(TABLE_ID) - KMS_KEY_NAME = "projects/1/locations/global/keyRings/1/cryptoKeys/1" + KMS_KEY_NAME = "projects/1/locations/us/keyRings/1/cryptoKeys/1" LOCATION = "us-central" @staticmethod @@ -1074,7 +1074,9 @@ def test_create_table_w_custom_property(self): self.assertEqual(got.table_id, self.TABLE_ID) def test_create_table_w_encryption_configuration(self): - from google.cloud.bigquery.table import EncryptionConfiguration + from google.cloud.bigquery.encryption_configuration import ( + EncryptionConfiguration, + ) from google.cloud.bigquery.table import Table path = "projects/%s/datasets/%s/tables" % (self.PROJECT, self.DS_ID) @@ -1136,7 +1138,8 @@ def test_create_table_w_day_partition_and_expire(self): self.assertEqual(got.table_id, self.TABLE_ID) def test_create_table_w_schema_and_query(self): - from google.cloud.bigquery.table import Table, SchemaField + from google.cloud.bigquery.schema import SchemaField + from google.cloud.bigquery.table import Table path = "projects/%s/datasets/%s/tables" % (self.PROJECT, self.DS_ID) query = "SELECT * from %s:%s" % (self.DS_ID, self.TABLE_ID) @@ -1751,7 +1754,8 @@ def test_update_routine(self): self.assertEqual(req[1]["headers"]["If-Match"], "im-an-etag") def test_update_table(self): - from google.cloud.bigquery.table import Table, SchemaField + from google.cloud.bigquery.schema import SchemaField + from google.cloud.bigquery.table import Table path = "projects/%s/datasets/%s/tables/%s" % ( self.PROJECT, @@ 
-1894,7 +1898,8 @@ def test_update_table_w_query(self): import datetime from google.cloud._helpers import UTC from google.cloud._helpers import _millis - from google.cloud.bigquery.table import Table, SchemaField + from google.cloud.bigquery.schema import SchemaField + from google.cloud.bigquery.table import Table path = "projects/%s/datasets/%s/tables/%s" % ( self.PROJECT, @@ -2952,8 +2957,26 @@ def test_list_jobs_w_time_filter(self): }, ) + def test_list_jobs_w_parent_job_filter(self): + from google.cloud.bigquery import job + + creds = _make_credentials() + client = self._make_one(self.PROJECT, creds) + conn = client._connection = make_connection({}, {}) + + parent_job_args = ["parent-job-123", job._AsyncJob("parent-job-123", client)] + + for parent_job in parent_job_args: + list(client.list_jobs(parent_job=parent_job)) + conn.api_request.assert_called_once_with( + method="GET", + path="/projects/%s/jobs" % self.PROJECT, + query_params={"projection": "full", "parentJobId": "parent-job-123"}, + ) + conn.api_request.reset_mock() + def test_load_table_from_uri(self): - from google.cloud.bigquery.job import LoadJob + from google.cloud.bigquery.job import LoadJob, LoadJobConfig JOB = "job_name" DESTINATION = "destination_table" @@ -2973,11 +2996,14 @@ def test_load_table_from_uri(self): } creds = _make_credentials() http = object() + job_config = LoadJobConfig() client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) conn = client._connection = make_connection(RESOURCE) destination = client.dataset(self.DS_ID).table(DESTINATION) - job = client.load_table_from_uri(SOURCE_URI, destination, job_id=JOB) + job = client.load_table_from_uri( + SOURCE_URI, destination, job_id=JOB, job_config=job_config + ) # Check that load_table_from_uri actually starts the job. 
conn.api_request.assert_called_once_with( @@ -2985,6 +3011,7 @@ def test_load_table_from_uri(self): ) self.assertIsInstance(job, LoadJob) + self.assertIsInstance(job._configuration, LoadJobConfig) self.assertIs(job._client, client) self.assertEqual(job.job_id, JOB) self.assertEqual(list(job.source_uris), [SOURCE_URI]) @@ -3080,6 +3107,26 @@ def test_load_table_from_uri_w_client_location(self): method="POST", path="/projects/other-project/jobs", data=resource ) + def test_load_table_from_uri_w_invalid_job_config(self): + from google.cloud.bigquery import job + + JOB = "job_name" + DESTINATION = "destination_table" + SOURCE_URI = "http://example.com/source.csv" + + creds = _make_credentials() + http = object() + job_config = job.CopyJobConfig() + client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) + destination = client.dataset(self.DS_ID).table(DESTINATION) + + with self.assertRaises(TypeError) as exc: + client.load_table_from_uri( + SOURCE_URI, destination, job_id=JOB, job_config=job_config + ) + + self.assertIn("Expected an instance of LoadJobConfig", exc.exception.args[0]) + @staticmethod def _mock_requests_response(status_code, headers, content=b""): return mock.Mock( @@ -3127,7 +3174,7 @@ def _initiate_resumable_upload_helper(self, num_retries=None): # Check the returned values. 
self.assertIsInstance(upload, ResumableUpload) upload_url = ( - "https://www.googleapis.com/upload/bigquery/v2/projects/" + "https://bigquery.googleapis.com/upload/bigquery/v2/projects/" + self.PROJECT + "/jobs?uploadType=resumable" ) @@ -3196,7 +3243,7 @@ def _do_multipart_upload_success_helper(self, get_boundary, num_retries=None): get_boundary.assert_called_once_with() upload_url = ( - "https://www.googleapis.com/upload/bigquery/v2/projects/" + "https://bigquery.googleapis.com/upload/bigquery/v2/projects/" + self.PROJECT + "/jobs?uploadType=multipart" ) @@ -3402,6 +3449,66 @@ def test_copy_table_w_source_strings(self): ).table("destination_table") self.assertEqual(job.destination, expected_destination) + def test_copy_table_w_invalid_job_config(self): + from google.cloud.bigquery import job + + JOB = "job_name" + SOURCE = "source_table" + DESTINATION = "destination_table" + + creds = _make_credentials() + http = object() + client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) + job_config = job.ExtractJobConfig() + dataset = client.dataset(self.DS_ID) + source = dataset.table(SOURCE) + destination = dataset.table(DESTINATION) + with self.assertRaises(TypeError) as exc: + client.copy_table(source, destination, job_id=JOB, job_config=job_config) + + self.assertIn("Expected an instance of CopyJobConfig", exc.exception.args[0]) + + def test_copy_table_w_valid_job_config(self): + from google.cloud.bigquery.job import CopyJobConfig + + JOB = "job_name" + SOURCE = "source_table" + DESTINATION = "destination_table" + RESOURCE = { + "jobReference": {"projectId": self.PROJECT, "jobId": JOB}, + "configuration": { + "copy": { + "sourceTables": [ + { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": SOURCE, + } + ], + "destinationTable": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": DESTINATION, + }, + } + }, + } + creds = _make_credentials() + http = object() + client = self._make_one(project=self.PROJECT, 
credentials=creds, _http=http) + job_config = CopyJobConfig() + conn = client._connection = make_connection(RESOURCE) + dataset = client.dataset(self.DS_ID) + source = dataset.table(SOURCE) + destination = dataset.table(DESTINATION) + + job = client.copy_table(source, destination, job_id=JOB, job_config=job_config) + # Check that copy_table actually starts the job. + conn.api_request.assert_called_once_with( + method="POST", path="/projects/%s/jobs" % self.PROJECT, data=RESOURCE + ) + self.assertIsInstance(job._configuration, CopyJobConfig) + def test_extract_table(self): from google.cloud.bigquery.job import ExtractJob @@ -3442,6 +3549,24 @@ def test_extract_table(self): self.assertEqual(job.source, source) self.assertEqual(list(job.destination_uris), [DESTINATION]) + def test_extract_table_w_invalid_job_config(self): + from google.cloud.bigquery import job + + JOB = "job_id" + SOURCE = "source_table" + DESTINATION = "gs://bucket_name/object_name" + + creds = _make_credentials() + http = object() + client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) + dataset = client.dataset(self.DS_ID) + source = dataset.table(SOURCE) + job_config = job.LoadJobConfig() + with self.assertRaises(TypeError) as exc: + client.extract_table(source, DESTINATION, job_id=JOB, job_config=job_config) + + self.assertIn("Expected an instance of ExtractJobConfig", exc.exception.args[0]) + def test_extract_table_w_explicit_project(self): job_id = "job_id" source_id = "source_table" @@ -3725,6 +3850,35 @@ def test_query_w_explicit_job_config(self): method="POST", path="/projects/PROJECT/jobs", data=resource ) + def test_query_w_invalid_job_config(self): + from google.cloud.bigquery import QueryJobConfig, DatasetReference + from google.cloud.bigquery import job + + job_id = "some-job-id" + query = "select count(*) from persons" + creds = _make_credentials() + http = object() + default_job_config = QueryJobConfig() + default_job_config.default_dataset = DatasetReference( 
+ self.PROJECT, "some-dataset" + ) + default_job_config.maximum_bytes_billed = 1000 + + client = self._make_one( + project=self.PROJECT, + credentials=creds, + _http=http, + default_query_job_config=default_job_config, + ) + + job_config = job.LoadJobConfig() + + with self.assertRaises(TypeError) as exc: + client.query( + query, job_id=job_id, location=self.LOCATION, job_config=job_config + ) + self.assertIn("Expected an instance of QueryJobConfig", exc.exception.args[0]) + def test_query_w_explicit_job_config_override(self): job_id = "some-job-id" query = "select count(*) from persons" @@ -3819,6 +3973,23 @@ def test_query_w_client_default_config_no_incoming(self): method="POST", path="/projects/PROJECT/jobs", data=resource ) + def test_query_w_invalid_default_job_config(self): + job_id = "some-job-id" + query = "select count(*) from persons" + creds = _make_credentials() + http = object() + default_job_config = object() + client = self._make_one( + project=self.PROJECT, + credentials=creds, + _http=http, + default_query_job_config=default_job_config, + ) + + with self.assertRaises(TypeError) as exc: + client.query(query, job_id=job_id, location=self.LOCATION) + self.assertIn("Expected an instance of QueryJobConfig", exc.exception.args[0]) + def test_query_w_client_location(self): job_id = "some-job-id" query = "select count(*) from persons" @@ -4005,7 +4176,7 @@ def test_insert_rows_w_schema(self): from google.cloud._helpers import UTC from google.cloud._helpers import _datetime_to_rfc3339 from google.cloud._helpers import _microseconds_from_datetime - from google.cloud.bigquery.table import SchemaField + from google.cloud.bigquery.schema import SchemaField WHEN_TS = 1437767599.006 WHEN = datetime.datetime.utcfromtimestamp(WHEN_TS).replace(tzinfo=UTC) @@ -4061,7 +4232,8 @@ def test_insert_rows_w_list_of_dictionaries(self): from google.cloud._helpers import UTC from google.cloud._helpers import _datetime_to_rfc3339 from google.cloud._helpers import 
_microseconds_from_datetime - from google.cloud.bigquery.table import Table, SchemaField + from google.cloud.bigquery.schema import SchemaField + from google.cloud.bigquery.table import Table WHEN_TS = 1437767599.006 WHEN = datetime.datetime.utcfromtimestamp(WHEN_TS).replace(tzinfo=UTC) @@ -4122,8 +4294,8 @@ def _row_data(row): ) def test_insert_rows_w_list_of_Rows(self): + from google.cloud.bigquery.schema import SchemaField from google.cloud.bigquery.table import Table - from google.cloud.bigquery.table import SchemaField from google.cloud.bigquery.table import Row PATH = "projects/%s/datasets/%s/tables/%s/insertAll" % ( @@ -4167,7 +4339,8 @@ def _row_data(row): ) def test_insert_rows_w_skip_invalid_and_ignore_unknown(self): - from google.cloud.bigquery.table import Table, SchemaField + from google.cloud.bigquery.schema import SchemaField + from google.cloud.bigquery.table import Table PATH = "projects/%s/datasets/%s/tables/%s/insertAll" % ( self.PROJECT, @@ -4243,7 +4416,8 @@ def _row_data(row): ) def test_insert_rows_w_repeated_fields(self): - from google.cloud.bigquery.table import Table, SchemaField + from google.cloud.bigquery.schema import SchemaField + from google.cloud.bigquery.table import Table PATH = "projects/%s/datasets/%s/tables/%s/insertAll" % ( self.PROJECT, @@ -4336,7 +4510,7 @@ def test_insert_rows_w_repeated_fields(self): ) def test_insert_rows_w_record_schema(self): - from google.cloud.bigquery.table import SchemaField + from google.cloud.bigquery.schema import SchemaField PATH = "projects/%s/datasets/%s/tables/%s/insertAll" % ( self.PROJECT, @@ -4404,6 +4578,40 @@ def test_insert_rows_w_record_schema(self): method="POST", path="/%s" % PATH, data=SENT ) + def test_insert_rows_w_explicit_none_insert_ids(self): + from google.cloud.bigquery.schema import SchemaField + from google.cloud.bigquery.table import Table + + PATH = "projects/{}/datasets/{}/tables/{}/insertAll".format( + self.PROJECT, self.DS_ID, self.TABLE_ID, + ) + creds = 
_make_credentials() + http = object() + client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) + conn = client._connection = make_connection({}) + schema = [ + SchemaField("full_name", "STRING", mode="REQUIRED"), + SchemaField("age", "INTEGER", mode="REQUIRED"), + ] + table = Table(self.TABLE_REF, schema=schema) + ROWS = [ + {"full_name": "Phred Phlyntstone", "age": 32}, + {"full_name": "Bharney Rhubble", "age": 33}, + ] + + def _row_data(row): + row["age"] = str(row["age"]) + return row + + SENT = {"rows": [{"json": _row_data(row), "insertId": None} for row in ROWS]} + + errors = client.insert_rows(table, ROWS, row_ids=[None] * len(ROWS)) + + self.assertEqual(len(errors), 0) + conn.api_request.assert_called_once_with( + method="POST", path="/{}".format(PATH), data=SENT + ) + def test_insert_rows_errors(self): from google.cloud.bigquery.table import Table @@ -4431,6 +4639,7 @@ def test_insert_rows_errors(self): def test_insert_rows_w_numeric(self): from google.cloud.bigquery import table + from google.cloud.bigquery.schema import SchemaField project = "PROJECT" ds_id = "DS_ID" @@ -4440,10 +4649,7 @@ def test_insert_rows_w_numeric(self): client = self._make_one(project=project, credentials=creds, _http=http) conn = client._connection = make_connection({}) table_ref = DatasetReference(project, ds_id).table(table_id) - schema = [ - table.SchemaField("account", "STRING"), - table.SchemaField("balance", "NUMERIC"), - ] + schema = [SchemaField("account", "STRING"), SchemaField("balance", "NUMERIC")] insert_table = table.Table(table_ref, schema=schema) rows = [ ("Savings", decimal.Decimal("23.47")), @@ -4473,9 +4679,183 @@ def test_insert_rows_w_numeric(self): data=sent, ) + @unittest.skipIf(pandas is None, "Requires `pandas`") + def test_insert_rows_from_dataframe(self): + from google.cloud.bigquery.schema import SchemaField + from google.cloud.bigquery.table import Table + + API_PATH = "/projects/{}/datasets/{}/tables/{}/insertAll".format( + 
self.PROJECT, self.DS_ID, self.TABLE_REF.table_id + ) + + dataframe = pandas.DataFrame( + [ + {"name": u"Little One", "age": 10, "adult": False}, + {"name": u"Young Gun", "age": 20, "adult": True}, + {"name": u"Dad", "age": 30, "adult": True}, + {"name": u"Stranger", "age": 40, "adult": True}, + ] + ) + + # create client + creds = _make_credentials() + http = object() + client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) + conn = client._connection = make_connection({}, {}) + + # create table + schema = [ + SchemaField("name", "STRING", mode="REQUIRED"), + SchemaField("age", "INTEGER", mode="REQUIRED"), + SchemaField("adult", "BOOLEAN", mode="REQUIRED"), + ] + table = Table(self.TABLE_REF, schema=schema) + + with mock.patch("uuid.uuid4", side_effect=map(str, range(len(dataframe)))): + error_info = client.insert_rows_from_dataframe( + table, dataframe, chunk_size=3 + ) + + self.assertEqual(len(error_info), 2) + for chunk_errors in error_info: + assert chunk_errors == [] + + EXPECTED_SENT_DATA = [ + { + "rows": [ + { + "insertId": "0", + "json": {"name": "Little One", "age": "10", "adult": "false"}, + }, + { + "insertId": "1", + "json": {"name": "Young Gun", "age": "20", "adult": "true"}, + }, + { + "insertId": "2", + "json": {"name": "Dad", "age": "30", "adult": "true"}, + }, + ] + }, + { + "rows": [ + { + "insertId": "3", + "json": {"name": "Stranger", "age": "40", "adult": "true"}, + } + ] + }, + ] + + actual_calls = conn.api_request.call_args_list + + for call, expected_data in six.moves.zip_longest( + actual_calls, EXPECTED_SENT_DATA + ): + expected_call = mock.call(method="POST", path=API_PATH, data=expected_data) + assert call == expected_call + + @unittest.skipIf(pandas is None, "Requires `pandas`") + def test_insert_rows_from_dataframe_many_columns(self): + from google.cloud.bigquery.schema import SchemaField + from google.cloud.bigquery.table import Table + + API_PATH = "/projects/{}/datasets/{}/tables/{}/insertAll".format( + 
self.PROJECT, self.DS_ID, self.TABLE_REF.table_id + ) + N_COLUMNS = 256 # should be >= 256 + + dataframe = pandas.DataFrame( + [{"foo_{}".format(i): "bar_{}".format(i) for i in range(N_COLUMNS)}] + ) + + # create client + creds = _make_credentials() + http = object() + client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) + conn = client._connection = make_connection({}, {}) + + # create table + schema = [SchemaField("foo_{}".format(i), "STRING") for i in range(N_COLUMNS)] + table = Table(self.TABLE_REF, schema=schema) + + with mock.patch("uuid.uuid4", side_effect=map(str, range(len(dataframe)))): + error_info = client.insert_rows_from_dataframe( + table, dataframe, chunk_size=3 + ) + + assert len(error_info) == 1 + assert error_info[0] == [] + + EXPECTED_SENT_DATA = { + "rows": [ + { + "insertId": "0", + "json": { + "foo_{}".format(i): "bar_{}".format(i) for i in range(N_COLUMNS) + }, + } + ] + } + expected_call = mock.call(method="POST", path=API_PATH, data=EXPECTED_SENT_DATA) + + actual_calls = conn.api_request.call_args_list + assert len(actual_calls) == 1 + assert actual_calls[0] == expected_call + + @unittest.skipIf(pandas is None, "Requires `pandas`") + def test_insert_rows_from_dataframe_w_explicit_none_insert_ids(self): + from google.cloud.bigquery.schema import SchemaField + from google.cloud.bigquery.table import Table + + API_PATH = "/projects/{}/datasets/{}/tables/{}/insertAll".format( + self.PROJECT, self.DS_ID, self.TABLE_REF.table_id + ) + + dataframe = pandas.DataFrame( + [ + {"name": u"Little One", "adult": False}, + {"name": u"Young Gun", "adult": True}, + ] + ) + + # create client + creds = _make_credentials() + http = object() + client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) + conn = client._connection = make_connection({}, {}) + + # create table + schema = [ + SchemaField("name", "STRING", mode="REQUIRED"), + SchemaField("adult", "BOOLEAN", mode="REQUIRED"), + ] + table = 
Table(self.TABLE_REF, schema=schema) + + error_info = client.insert_rows_from_dataframe( + table, dataframe, row_ids=[None] * len(dataframe) + ) + + self.assertEqual(len(error_info), 1) + assert error_info[0] == [] # no chunk errors + + EXPECTED_SENT_DATA = { + "rows": [ + {"insertId": None, "json": {"name": "Little One", "adult": "false"}}, + {"insertId": None, "json": {"name": "Young Gun", "adult": "true"}}, + ] + } + + actual_calls = conn.api_request.call_args_list + assert len(actual_calls) == 1 + assert actual_calls[0] == mock.call( + method="POST", path=API_PATH, data=EXPECTED_SENT_DATA + ) + def test_insert_rows_json(self): - from google.cloud.bigquery.table import Table, SchemaField from google.cloud.bigquery.dataset import DatasetReference + from google.cloud.bigquery.schema import SchemaField + from google.cloud.bigquery.table import Table PROJECT = "PROJECT" DS_ID = "DS_ID" @@ -4541,6 +4921,27 @@ def test_insert_rows_json_with_string_id(self): data=expected, ) + def test_insert_rows_json_w_explicit_none_insert_ids(self): + rows = [{"col1": "val1"}, {"col2": "val2"}] + creds = _make_credentials() + http = object() + client = self._make_one( + project="default-project", credentials=creds, _http=http + ) + conn = client._connection = make_connection({}) + + errors = client.insert_rows_json( + "proj.dset.tbl", rows, row_ids=[None] * len(rows), + ) + + self.assertEqual(len(errors), 0) + expected = {"rows": [{"json": row, "insertId": None} for row in rows]} + conn.api_request.assert_called_once_with( + method="POST", + path="/projects/proj/datasets/dset/tables/tbl/insertAll", + data=expected, + ) + def test_list_partitions(self): from google.cloud.bigquery.table import Table @@ -4586,8 +4987,8 @@ def test_list_partitions_with_string_id(self): def test_list_rows(self): import datetime from google.cloud._helpers import UTC + from google.cloud.bigquery.schema import SchemaField from google.cloud.bigquery.table import Table - from google.cloud.bigquery.table 
import SchemaField from google.cloud.bigquery.table import Row PATH = "projects/%s/datasets/%s/tables/%s/data" % ( @@ -4687,7 +5088,8 @@ def test_list_rows_empty_table(self): self.assertEqual(rows.total_rows, 0) def test_list_rows_query_params(self): - from google.cloud.bigquery.table import Table, SchemaField + from google.cloud.bigquery.schema import SchemaField + from google.cloud.bigquery.table import Table creds = _make_credentials() http = object() @@ -4709,7 +5111,7 @@ def test_list_rows_query_params(self): self.assertEqual(req[1]["query_params"], test[1], "for kwargs %s" % test[0]) def test_list_rows_repeated_fields(self): - from google.cloud.bigquery.table import SchemaField + from google.cloud.bigquery.schema import SchemaField PATH = "projects/%s/datasets/%s/tables/%s/data" % ( self.PROJECT, @@ -4769,7 +5171,8 @@ def test_list_rows_repeated_fields(self): ) def test_list_rows_w_record_schema(self): - from google.cloud.bigquery.table import Table, SchemaField + from google.cloud.bigquery.schema import SchemaField + from google.cloud.bigquery.table import Table PATH = "projects/%s/datasets/%s/tables/%s/data" % ( self.PROJECT, @@ -5275,6 +5678,19 @@ def test_load_table_from_file_bad_mode(self): with pytest.raises(ValueError): client.load_table_from_file(file_obj, self.TABLE_REF) + def test_load_table_from_file_w_invalid_job_config(self): + from google.cloud.bigquery import job + + client = self._make_client() + gzip_file = self._make_gzip_file_obj(writable=True) + config = job.QueryJobConfig() + with pytest.raises(TypeError) as exc: + client.load_table_from_file( + gzip_file, self.TABLE_REF, job_id="job_id", job_config=config + ) + err_msg = str(exc.value) + assert "Expected an instance of LoadJobConfig" in err_msg + @unittest.skipIf(pandas is None, "Requires `pandas`") @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_load_table_from_dataframe(self): @@ -5579,6 +5995,43 @@ def test_load_table_from_dataframe_unknown_table(self): 
job_config=mock.ANY, ) + @unittest.skipIf(pandas is None, "Requires `pandas`") + def test_load_table_from_dataframe_no_schema_warning_wo_pyarrow(self): + client = self._make_client() + + # Pick at least one column type that translates to Pandas dtype + # "object". A string column matches that. + records = [{"name": "Monty", "age": 100}, {"name": "Python", "age": 60}] + dataframe = pandas.DataFrame(records) + + get_table_patch = mock.patch( + "google.cloud.bigquery.client.Client.get_table", + autospec=True, + side_effect=google.api_core.exceptions.NotFound("Table not found"), + ) + load_patch = mock.patch( + "google.cloud.bigquery.client.Client.load_table_from_file", autospec=True + ) + pyarrow_patch = mock.patch("google.cloud.bigquery.client.pyarrow", None) + pyarrow_patch_helpers = mock.patch( + "google.cloud.bigquery._pandas_helpers.pyarrow", None + ) + catch_warnings = warnings.catch_warnings(record=True) + + with get_table_patch, load_patch, pyarrow_patch, pyarrow_patch_helpers, catch_warnings as warned: + client.load_table_from_dataframe( + dataframe, self.TABLE_REF, location=self.LOCATION + ) + + matches = [ + warning + for warning in warned + if warning.category in (DeprecationWarning, PendingDeprecationWarning) + and "could not be detected" in str(warning) + and "please provide a schema" in str(warning) + ] + assert matches, "A missing schema deprecation warning was not raised." 
+ @unittest.skipIf(pandas is None, "Requires `pandas`") @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_load_table_from_dataframe_struct_fields_error(self): @@ -5733,7 +6186,6 @@ def test_load_table_from_dataframe_w_partial_schema_extra_types(self): assert "unknown_col" in message @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_load_table_from_dataframe_w_partial_schema_missing_types(self): from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES from google.cloud.bigquery import job @@ -5750,10 +6202,13 @@ def test_load_table_from_dataframe_w_partial_schema_missing_types(self): load_patch = mock.patch( "google.cloud.bigquery.client.Client.load_table_from_file", autospec=True ) + pyarrow_patch = mock.patch( + "google.cloud.bigquery._pandas_helpers.pyarrow", None + ) schema = (SchemaField("string_col", "STRING"),) job_config = job.LoadJobConfig(schema=schema) - with load_patch as load_table_from_file, warnings.catch_warnings( + with pyarrow_patch, load_patch as load_table_from_file, warnings.catch_warnings( record=True ) as warned: client.load_table_from_dataframe( @@ -5939,6 +6394,24 @@ def test_load_table_from_dataframe_w_nulls(self): assert sent_config.schema == schema assert sent_config.source_format == job.SourceFormat.PARQUET + @unittest.skipIf(pandas is None, "Requires `pandas`") + def test_load_table_from_dataframe_w_invaild_job_config(self): + from google.cloud.bigquery import job + + client = self._make_client() + + records = [{"float_column": 3.14, "struct_column": [{"foo": 1}, {"bar": -1}]}] + dataframe = pandas.DataFrame(data=records) + job_config = job.CopyJobConfig() + + with pytest.raises(TypeError) as exc: + client.load_table_from_dataframe( + dataframe, self.TABLE_REF, job_config=job_config, location=self.LOCATION + ) + + err_msg = str(exc.value) + assert "Expected an instance of LoadJobConfig" in err_msg + def test_load_table_from_json_basic_use(self): 
from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES from google.cloud.bigquery import job @@ -6027,6 +6500,26 @@ def test_load_table_from_json_non_default_args(self): # all properties should have been cloned and sent to the backend assert sent_config._properties.get("load", {}).get("unknown_field") == "foobar" + def test_load_table_from_json_w_invalid_job_config(self): + from google.cloud.bigquery import job + + client = self._make_client() + json_rows = [ + {"name": "One", "age": 11, "birthday": "2008-09-10", "adult": False}, + {"name": "Two", "age": 22, "birthday": "1997-08-09", "adult": True}, + ] + job_config = job.CopyJobConfig() + with pytest.raises(TypeError) as exc: + client.load_table_from_json( + json_rows, + self.TABLE_REF, + job_config=job_config, + project="project-x", + location="EU", + ) + err_msg = str(exc.value) + assert "Expected an instance of LoadJobConfig" in err_msg + # Low-level tests @classmethod diff --git a/bigquery/tests/unit/test_dataset.py b/bigquery/tests/unit/test_dataset.py index 26b1729a240c..ac13e00932ba 100644 --- a/bigquery/tests/unit/test_dataset.py +++ b/bigquery/tests/unit/test_dataset.py @@ -275,6 +275,7 @@ class TestDataset(unittest.TestCase): PROJECT = "project" DS_ID = "dataset-id" DS_REF = DatasetReference(PROJECT, DS_ID) + KMS_KEY_NAME = "projects/1/locations/us/keyRings/1/cryptoKeys/1" @staticmethod def _get_target_class(): @@ -314,6 +315,7 @@ def _make_resource(self): {"role": "WRITER", "specialGroup": "projectWriters"}, {"role": "READER", "specialGroup": "projectReaders"}, ], + "defaultEncryptionConfiguration": {"kmsKeyName": self.KMS_KEY_NAME}, } def _verify_access_entry(self, access_entries, resource): @@ -369,6 +371,13 @@ def _verify_resource_properties(self, dataset, resource): self.assertEqual(dataset.description, resource.get("description")) self.assertEqual(dataset.friendly_name, resource.get("friendlyName")) self.assertEqual(dataset.location, resource.get("location")) + if 
"defaultEncryptionConfiguration" in resource: + self.assertEqual( + dataset.default_encryption_configuration.kms_key_name, + resource.get("defaultEncryptionConfiguration")["kmsKeyName"], + ) + else: + self.assertIsNone(dataset.default_encryption_configuration) if "access" in resource: self._verify_access_entry(dataset.access_entries, resource) @@ -454,6 +463,14 @@ def test_access_entries_setter(self): dataset.access_entries = [phred, bharney] self.assertEqual(dataset.access_entries, [phred, bharney]) + def test_default_partition_expiration_ms(self): + dataset = self._make_one("proj.dset") + assert dataset.default_partition_expiration_ms is None + dataset.default_partition_expiration_ms = 12345 + assert dataset.default_partition_expiration_ms == 12345 + dataset.default_partition_expiration_ms = None + assert dataset.default_partition_expiration_ms is None + def test_default_table_expiration_ms_setter_bad_value(self): dataset = self._make_one(self.DS_REF) with self.assertRaises(ValueError): @@ -550,6 +567,22 @@ def test_to_api_repr_w_custom_field(self): } self.assertEqual(resource, exp_resource) + def test_default_encryption_configuration_setter(self): + from google.cloud.bigquery.encryption_configuration import ( + EncryptionConfiguration, + ) + + dataset = self._make_one(self.DS_REF) + encryption_configuration = EncryptionConfiguration( + kms_key_name=self.KMS_KEY_NAME + ) + dataset.default_encryption_configuration = encryption_configuration + self.assertEqual( + dataset.default_encryption_configuration.kms_key_name, self.KMS_KEY_NAME + ) + dataset.default_encryption_configuration = None + self.assertIsNone(dataset.default_encryption_configuration) + def test_from_string(self): cls = self._get_target_class() got = cls.from_string("string-project.string_dataset") diff --git a/bigquery/tests/unit/test_dbapi__helpers.py b/bigquery/tests/unit/test_dbapi__helpers.py index bcc3e0879f87..45c690ede363 100644 --- a/bigquery/tests/unit/test_dbapi__helpers.py +++ 
b/bigquery/tests/unit/test_dbapi__helpers.py @@ -66,6 +66,61 @@ def test_scalar_to_query_parameter_w_special_floats(self): self.assertTrue(math.isinf(inf_parameter.value)) self.assertEqual(inf_parameter.type_, "FLOAT64") + def test_array_to_query_parameter_valid_argument(self): + expected_types = [ + ([True, False], "BOOL"), + ([123, -456, 0], "INT64"), + ([1.25, 2.50], "FLOAT64"), + ([decimal.Decimal("1.25")], "NUMERIC"), + ([b"foo", b"bar"], "BYTES"), + ([u"foo", u"bar"], "STRING"), + ([datetime.date(2017, 4, 1), datetime.date(2018, 4, 1)], "DATE"), + ([datetime.time(12, 34, 56), datetime.time(10, 20, 30)], "TIME"), + ( + [ + datetime.datetime(2012, 3, 4, 5, 6, 7), + datetime.datetime(2013, 1, 1, 10, 20, 30), + ], + "DATETIME", + ), + ( + [ + datetime.datetime( + 2012, 3, 4, 5, 6, 7, tzinfo=google.cloud._helpers.UTC + ), + datetime.datetime( + 2013, 1, 1, 10, 20, 30, tzinfo=google.cloud._helpers.UTC + ), + ], + "TIMESTAMP", + ), + ] + + for values, expected_type in expected_types: + msg = "value: {} expected_type: {}".format(values, expected_type) + parameter = _helpers.array_to_query_parameter(values) + self.assertIsNone(parameter.name, msg=msg) + self.assertEqual(parameter.array_type, expected_type, msg=msg) + self.assertEqual(parameter.values, values, msg=msg) + named_param = _helpers.array_to_query_parameter(values, name="my_param") + self.assertEqual(named_param.name, "my_param", msg=msg) + self.assertEqual(named_param.array_type, expected_type, msg=msg) + self.assertEqual(named_param.values, values, msg=msg) + + def test_array_to_query_parameter_empty_argument(self): + with self.assertRaises(exceptions.ProgrammingError): + _helpers.array_to_query_parameter([]) + + def test_array_to_query_parameter_unsupported_sequence(self): + unsupported_iterables = [{10, 20, 30}, u"foo", b"bar", bytearray([65, 75, 85])] + for iterable in unsupported_iterables: + with self.assertRaises(exceptions.ProgrammingError): + _helpers.array_to_query_parameter(iterable) + + def 
test_array_to_query_parameter_sequence_w_invalid_elements(self): + with self.assertRaises(exceptions.ProgrammingError): + _helpers.array_to_query_parameter([object(), 2, 7]) + def test_to_query_parameters_w_dict(self): parameters = {"somebool": True, "somestring": u"a-string-value"} query_parameters = _helpers.to_query_parameters(parameters) @@ -82,6 +137,23 @@ def test_to_query_parameters_w_dict(self): ), ) + def test_to_query_parameters_w_dict_array_param(self): + parameters = {"somelist": [10, 20]} + query_parameters = _helpers.to_query_parameters(parameters) + + self.assertEqual(len(query_parameters), 1) + param = query_parameters[0] + + self.assertEqual(param.name, "somelist") + self.assertEqual(param.array_type, "INT64") + self.assertEqual(param.values, [10, 20]) + + def test_to_query_parameters_w_dict_dict_param(self): + parameters = {"my_param": {"foo": "bar"}} + + with self.assertRaises(NotImplementedError): + _helpers.to_query_parameters(parameters) + def test_to_query_parameters_w_list(self): parameters = [True, u"a-string-value"] query_parameters = _helpers.to_query_parameters(parameters) @@ -92,3 +164,24 @@ def test_to_query_parameters_w_list(self): sorted(query_parameter_tuples), sorted([(None, "BOOL", True), (None, "STRING", u"a-string-value")]), ) + + def test_to_query_parameters_w_list_array_param(self): + parameters = [[10, 20]] + query_parameters = _helpers.to_query_parameters(parameters) + + self.assertEqual(len(query_parameters), 1) + param = query_parameters[0] + + self.assertIsNone(param.name) + self.assertEqual(param.array_type, "INT64") + self.assertEqual(param.values, [10, 20]) + + def test_to_query_parameters_w_list_dict_param(self): + parameters = [{"foo": "bar"}] + + with self.assertRaises(NotImplementedError): + _helpers.to_query_parameters(parameters) + + def test_to_query_parameters_none_argument(self): + query_parameters = _helpers.to_query_parameters(None) + self.assertEqual(query_parameters, []) diff --git 
a/bigquery/tests/unit/test_dbapi_cursor.py b/bigquery/tests/unit/test_dbapi_cursor.py index 4a675c73958d..4ccd5e71af72 100644 --- a/bigquery/tests/unit/test_dbapi_cursor.py +++ b/bigquery/tests/unit/test_dbapi_cursor.py @@ -191,6 +191,20 @@ def test_execute_custom_job_id(self): self.assertEqual(args[0], "SELECT 1;") self.assertEqual(kwargs["job_id"], "foo") + def test_execute_custom_job_config(self): + from google.cloud.bigquery.dbapi import connect + from google.cloud.bigquery import job + + config = job.QueryJobConfig(use_legacy_sql=True) + client = self._mock_client(rows=[], num_dml_affected_rows=0) + connection = connect(client) + cursor = connection.cursor() + cursor.execute("SELECT 1;", job_id="foo", job_config=config) + args, kwargs = client.query.call_args + self.assertEqual(args[0], "SELECT 1;") + self.assertEqual(kwargs["job_id"], "foo") + self.assertEqual(kwargs["job_config"], config) + def test_execute_w_dml(self): from google.cloud.bigquery.dbapi import connect diff --git a/bigquery/tests/unit/test_encryption_configuration.py b/bigquery/tests/unit/test_encryption_configuration.py new file mode 100644 index 000000000000..f432a903b4cc --- /dev/null +++ b/bigquery/tests/unit/test_encryption_configuration.py @@ -0,0 +1,111 @@ +# Copyright 2015 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import unittest +import mock + + +class TestEncryptionConfiguration(unittest.TestCase): + KMS_KEY_NAME = "projects/1/locations/us/keyRings/1/cryptoKeys/1" + + @staticmethod + def _get_target_class(): + from google.cloud.bigquery.encryption_configuration import ( + EncryptionConfiguration, + ) + + return EncryptionConfiguration + + def _make_one(self, *args, **kw): + return self._get_target_class()(*args, **kw) + + def test_ctor_defaults(self): + encryption_config = self._make_one() + self.assertIsNone(encryption_config.kms_key_name) + + def test_ctor_with_key(self): + encryption_config = self._make_one(kms_key_name=self.KMS_KEY_NAME) + self.assertEqual(encryption_config.kms_key_name, self.KMS_KEY_NAME) + + def test_kms_key_name_setter(self): + encryption_config = self._make_one() + self.assertIsNone(encryption_config.kms_key_name) + encryption_config.kms_key_name = self.KMS_KEY_NAME + self.assertEqual(encryption_config.kms_key_name, self.KMS_KEY_NAME) + encryption_config.kms_key_name = None + self.assertIsNone(encryption_config.kms_key_name) + + def test_from_api_repr(self): + RESOURCE = {"kmsKeyName": self.KMS_KEY_NAME} + klass = self._get_target_class() + encryption_config = klass.from_api_repr(RESOURCE) + self.assertEqual(encryption_config.kms_key_name, self.KMS_KEY_NAME) + + def test_to_api_repr(self): + encryption_config = self._make_one(kms_key_name=self.KMS_KEY_NAME) + resource = encryption_config.to_api_repr() + self.assertEqual(resource, {"kmsKeyName": self.KMS_KEY_NAME}) + + def test___eq___wrong_type(self): + encryption_config = self._make_one() + other = object() + self.assertNotEqual(encryption_config, other) + self.assertEqual(encryption_config, mock.ANY) + + def test___eq___kms_key_name_mismatch(self): + encryption_config = self._make_one() + other = self._make_one(self.KMS_KEY_NAME) + self.assertNotEqual(encryption_config, other) + + def test___eq___hit(self): + encryption_config = self._make_one(self.KMS_KEY_NAME) + other = 
self._make_one(self.KMS_KEY_NAME) + self.assertEqual(encryption_config, other) + + def test___ne___wrong_type(self): + encryption_config = self._make_one() + other = object() + self.assertNotEqual(encryption_config, other) + self.assertEqual(encryption_config, mock.ANY) + + def test___ne___same_value(self): + encryption_config1 = self._make_one(self.KMS_KEY_NAME) + encryption_config2 = self._make_one(self.KMS_KEY_NAME) + # unittest ``assertEqual`` uses ``==`` not ``!=``. + comparison_val = encryption_config1 != encryption_config2 + self.assertFalse(comparison_val) + + def test___ne___different_values(self): + encryption_config1 = self._make_one() + encryption_config2 = self._make_one(self.KMS_KEY_NAME) + self.assertNotEqual(encryption_config1, encryption_config2) + + def test___hash__set_equality(self): + encryption_config1 = self._make_one(self.KMS_KEY_NAME) + encryption_config2 = self._make_one(self.KMS_KEY_NAME) + set_one = {encryption_config1, encryption_config2} + set_two = {encryption_config1, encryption_config2} + self.assertEqual(set_one, set_two) + + def test___hash__not_equals(self): + encryption_config1 = self._make_one() + encryption_config2 = self._make_one(self.KMS_KEY_NAME) + set_one = {encryption_config1} + set_two = {encryption_config2} + self.assertNotEqual(set_one, set_two) + + def test___repr__(self): + encryption_config = self._make_one(self.KMS_KEY_NAME) + expected = "EncryptionConfiguration({})".format(self.KMS_KEY_NAME) + self.assertEqual(repr(encryption_config), expected) diff --git a/bigquery/tests/unit/test_external_config.py b/bigquery/tests/unit/test_external_config.py index ddf95e317969..dab4391cbe04 100644 --- a/bigquery/tests/unit/test_external_config.py +++ b/bigquery/tests/unit/test_external_config.py @@ -130,7 +130,10 @@ def test_from_api_repr_sheets(self): self.BASE_RESOURCE, { "sourceFormat": "GOOGLE_SHEETS", - "googleSheetsOptions": {"skipLeadingRows": "123"}, + "googleSheetsOptions": { + "skipLeadingRows": "123", + "range": 
"Sheet1!A5:B10", + }, }, ) @@ -140,14 +143,17 @@ def test_from_api_repr_sheets(self): self.assertEqual(ec.source_format, "GOOGLE_SHEETS") self.assertIsInstance(ec.options, external_config.GoogleSheetsOptions) self.assertEqual(ec.options.skip_leading_rows, 123) + self.assertEqual(ec.options.range, "Sheet1!A5:B10") got_resource = ec.to_api_repr() self.assertEqual(got_resource, resource) del resource["googleSheetsOptions"]["skipLeadingRows"] + del resource["googleSheetsOptions"]["range"] ec = external_config.ExternalConfig.from_api_repr(resource) self.assertIsNone(ec.options.skip_leading_rows) + self.assertIsNone(ec.options.range) got_resource = ec.to_api_repr() self.assertEqual(got_resource, resource) @@ -155,11 +161,12 @@ def test_to_api_repr_sheets(self): ec = external_config.ExternalConfig("GOOGLE_SHEETS") options = external_config.GoogleSheetsOptions() options.skip_leading_rows = 123 + options.range = "Sheet1!A5:B10" ec._options = options exp_resource = { "sourceFormat": "GOOGLE_SHEETS", - "googleSheetsOptions": {"skipLeadingRows": "123"}, + "googleSheetsOptions": {"skipLeadingRows": "123", "range": "Sheet1!A5:B10"}, } got_resource = ec.to_api_repr() diff --git a/bigquery/tests/unit/test_job.py b/bigquery/tests/unit/test_job.py index 98090a5257fd..a2aeb5efbc4a 100644 --- a/bigquery/tests/unit/test_job.py +++ b/bigquery/tests/unit/test_job.py @@ -73,7 +73,7 @@ def _make_job_resource( started=False, ended=False, etag="abc-def-hjk", - endpoint="https://www.googleapis.com", + endpoint="https://bigquery.googleapis.com", job_type="load", job_id="a-random-id", project_id="some-project", @@ -268,6 +268,53 @@ def test_job_type(self): self.assertEqual(derived.job_type, "derived") + def test_parent_job_id(self): + client = _make_client(project=self.PROJECT) + job = self._make_one(self.JOB_ID, client) + + self.assertIsNone(job.parent_job_id) + job._properties["statistics"] = {"parentJobId": "parent-job-123"} + self.assertEqual(job.parent_job_id, "parent-job-123") + + def 
test_script_statistics(self): + client = _make_client(project=self.PROJECT) + job = self._make_one(self.JOB_ID, client) + + self.assertIsNone(job.script_statistics) + job._properties["statistics"] = { + "scriptStatistics": { + "evaluationKind": "EXPRESSION", + "stackFrames": [ + { + "startLine": 5, + "startColumn": 29, + "endLine": 9, + "endColumn": 14, + "text": "QUERY TEXT", + } + ], + } + } + script_stats = job.script_statistics + self.assertEqual(script_stats.evaluation_kind, "EXPRESSION") + stack_frames = script_stats.stack_frames + self.assertEqual(len(stack_frames), 1) + stack_frame = stack_frames[0] + self.assertIsNone(stack_frame.procedure_id) + self.assertEqual(stack_frame.start_line, 5) + self.assertEqual(stack_frame.start_column, 29) + self.assertEqual(stack_frame.end_line, 9) + self.assertEqual(stack_frame.end_column, 14) + self.assertEqual(stack_frame.text, "QUERY TEXT") + + def test_num_child_jobs(self): + client = _make_client(project=self.PROJECT) + job = self._make_one(self.JOB_ID, client) + + self.assertEqual(job.num_child_jobs, 0) + job._properties["statistics"] = {"numChildJobs": "17"} + self.assertEqual(job.num_child_jobs, 17) + def test_labels_miss(self): client = _make_client(project=self.PROJECT) job = self._make_one(self.JOB_ID, client) @@ -796,7 +843,7 @@ def test__set_future_result_w_done_wo_result_set_w_error(self): set_exception.assert_called_once() args, kw = set_exception.call_args - exception, = args + (exception,) = args self.assertIsInstance(exception, NotFound) self.assertEqual(exception.message, "testing") self.assertEqual(kw, {}) @@ -1022,7 +1069,7 @@ class _Base(object): from google.cloud.bigquery.dataset import DatasetReference from google.cloud.bigquery.table import TableReference - ENDPOINT = "https://www.googleapis.com" + ENDPOINT = "https://bigquery.googleapis.com" PROJECT = "project" SOURCE1 = "http://example.com/source1.csv" DS_ID = "dataset_id" @@ -1030,7 +1077,7 @@ class _Base(object): TABLE_ID = "table_id" TABLE_REF 
= TableReference(DS_REF, TABLE_ID) JOB_ID = "JOB_ID" - KMS_KEY_NAME = "projects/1/locations/global/keyRings/1/cryptoKeys/1" + KMS_KEY_NAME = "projects/1/locations/us/keyRings/1/cryptoKeys/1" def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) @@ -1229,7 +1276,9 @@ def test_destination_encryption_configuration_missing(self): self.assertIsNone(config.destination_encryption_configuration) def test_destination_encryption_configuration_hit(self): - from google.cloud.bigquery.table import EncryptionConfiguration + from google.cloud.bigquery.encryption_configuration import ( + EncryptionConfiguration, + ) kms_key_name = "kms-key-name" encryption_configuration = EncryptionConfiguration(kms_key_name) @@ -1242,7 +1291,9 @@ def test_destination_encryption_configuration_hit(self): ) def test_destination_encryption_configuration_setter(self): - from google.cloud.bigquery.table import EncryptionConfiguration + from google.cloud.bigquery.encryption_configuration import ( + EncryptionConfiguration, + ) kms_key_name = "kms-key-name" encryption_configuration = EncryptionConfiguration(kms_key_name) @@ -1481,7 +1532,7 @@ def test_schema_hit(self): self.assertEqual(all_props, SchemaField.from_api_repr(all_props_repr)) self.assertEqual(minimal, SchemaField.from_api_repr(minimal_repr)) - def test_schema_setter(self): + def test_schema_setter_fields(self): from google.cloud.bigquery.schema import SchemaField config = self._get_target_class()() @@ -1504,6 +1555,42 @@ def test_schema_setter(self): config._properties["load"]["schema"], {"fields": [full_name_repr, age_repr]} ) + def test_schema_setter_valid_mappings_list(self): + config = self._get_target_class()() + + schema = [ + {"name": "full_name", "type": "STRING", "mode": "REQUIRED"}, + {"name": "age", "type": "INTEGER", "mode": "REQUIRED"}, + ] + config.schema = schema + + full_name_repr = { + "name": "full_name", + "type": "STRING", + "mode": "REQUIRED", + "description": None, + } + age_repr = { + "name": 
"age", + "type": "INTEGER", + "mode": "REQUIRED", + "description": None, + } + self.assertEqual( + config._properties["load"]["schema"], {"fields": [full_name_repr, age_repr]} + ) + + def test_schema_setter_invalid_mappings_list(self): + config = self._get_target_class()() + + schema = [ + {"name": "full_name", "type": "STRING", "mode": "REQUIRED"}, + {"name": "age", "typeoo": "INTEGER", "mode": "REQUIRED"}, + ] + + with self.assertRaises(Exception): + config.schema = schema + def test_schema_setter_unsetting_schema(self): from google.cloud.bigquery.schema import SchemaField @@ -1587,6 +1674,44 @@ def test_source_format_setter(self): config.source_format = source_format self.assertEqual(config._properties["load"]["sourceFormat"], source_format) + def test_range_partitioning_w_none(self): + object_under_test = self._get_target_class()() + assert object_under_test.range_partitioning is None + + def test_range_partitioning_w_value(self): + object_under_test = self._get_target_class()() + object_under_test._properties["load"]["rangePartitioning"] = { + "field": "column_one", + "range": {"start": 1, "end": 1000, "interval": 10}, + } + object_under_test.range_partitioning.field == "column_one" + object_under_test.range_partitioning.range_.start == 1 + object_under_test.range_partitioning.range_.end == 1000 + object_under_test.range_partitioning.range_.interval == 10 + + def test_range_partitioning_setter(self): + from google.cloud.bigquery.table import PartitionRange + from google.cloud.bigquery.table import RangePartitioning + + object_under_test = self._get_target_class()() + object_under_test.range_partitioning = RangePartitioning( + field="column_one", range_=PartitionRange(start=1, end=1000, interval=10) + ) + object_under_test.range_partitioning.field == "column_one" + object_under_test.range_partitioning.range_.start == 1 + object_under_test.range_partitioning.range_.end == 1000 + object_under_test.range_partitioning.range_.interval == 10 + + def 
test_range_partitioning_setter_w_none(self): + object_under_test = self._get_target_class()() + object_under_test.range_partitioning = None + assert object_under_test.range_partitioning is None + + def test_range_partitioning_setter_w_wrong_type(self): + object_under_test = self._get_target_class()() + with pytest.raises(ValueError, match="RangePartitioning"): + object_under_test.range_partitioning = object() + def test_time_partitioning_miss(self): config = self._get_target_class()() self.assertIsNone(config.time_partitioning) @@ -1841,6 +1966,7 @@ def test_ctor(self): self.assertIsNone(job.destination_encryption_configuration) self.assertIsNone(job.destination_table_description) self.assertIsNone(job.destination_table_friendly_name) + self.assertIsNone(job.range_partitioning) self.assertIsNone(job.time_partitioning) self.assertIsNone(job.use_avro_logical_types) self.assertIsNone(job.clustering_fields) @@ -2439,7 +2565,9 @@ def test_ctor_w_properties(self): self.assertEqual(config.write_disposition, write_disposition) def test_to_api_repr_with_encryption(self): - from google.cloud.bigquery.table import EncryptionConfiguration + from google.cloud.bigquery.encryption_configuration import ( + EncryptionConfiguration, + ) config = self._make_one() config.destination_encryption_configuration = EncryptionConfiguration( @@ -3275,6 +3403,44 @@ def test_destinaton_w_string(self): expected = table.TableReference.from_string(destination) self.assertEqual(config.destination, expected) + def test_range_partitioning_w_none(self): + object_under_test = self._get_target_class()() + assert object_under_test.range_partitioning is None + + def test_range_partitioning_w_value(self): + object_under_test = self._get_target_class()() + object_under_test._properties["query"]["rangePartitioning"] = { + "field": "column_one", + "range": {"start": 1, "end": 1000, "interval": 10}, + } + object_under_test.range_partitioning.field == "column_one" + 
object_under_test.range_partitioning.range_.start == 1 + object_under_test.range_partitioning.range_.end == 1000 + object_under_test.range_partitioning.range_.interval == 10 + + def test_range_partitioning_setter(self): + from google.cloud.bigquery.table import PartitionRange + from google.cloud.bigquery.table import RangePartitioning + + object_under_test = self._get_target_class()() + object_under_test.range_partitioning = RangePartitioning( + field="column_one", range_=PartitionRange(start=1, end=1000, interval=10) + ) + object_under_test.range_partitioning.field == "column_one" + object_under_test.range_partitioning.range_.start == 1 + object_under_test.range_partitioning.range_.end == 1000 + object_under_test.range_partitioning.range_.interval == 10 + + def test_range_partitioning_setter_w_none(self): + object_under_test = self._get_target_class()() + object_under_test.range_partitioning = None + assert object_under_test.range_partitioning is None + + def test_range_partitioning_setter_w_wrong_type(self): + object_under_test = self._get_target_class()() + with pytest.raises(ValueError, match="RangePartitioning"): + object_under_test.range_partitioning = object() + def test_time_partitioning(self): from google.cloud.bigquery import table @@ -3364,7 +3530,9 @@ def test_to_api_repr_normal(self): self.assertEqual(resource["someNewProperty"], "Woohoo, alpha stuff.") def test_to_api_repr_with_encryption(self): - from google.cloud.bigquery.table import EncryptionConfiguration + from google.cloud.bigquery.encryption_configuration import ( + EncryptionConfiguration, + ) config = self._make_one() config.destination_encryption_configuration = EncryptionConfiguration( @@ -3573,6 +3741,7 @@ def test_ctor_defaults(self): self.assertIsNone(job.maximum_bytes_billed) self.assertIsNone(job.table_definitions) self.assertIsNone(job.destination_encryption_configuration) + self.assertIsNone(job.range_partitioning) self.assertIsNone(job.time_partitioning) 
self.assertIsNone(job.clustering_fields) self.assertIsNone(job.schema_update_options) @@ -4337,8 +4506,10 @@ def test_result_error(self): self.assertIsInstance(exc_info.exception, exceptions.GoogleCloudError) self.assertEqual(exc_info.exception.code, http_client.BAD_REQUEST) - full_text = str(exc_info.exception) + exc_job_instance = getattr(exc_info.exception, "query_job", None) + self.assertIs(exc_job_instance, job) + full_text = str(exc_info.exception) assert job.job_id in full_text assert "Query Job SQL Follows" in full_text @@ -4370,8 +4541,10 @@ def test__begin_error(self): self.assertIsInstance(exc_info.exception, exceptions.GoogleCloudError) self.assertEqual(exc_info.exception.code, http_client.BAD_REQUEST) - full_text = str(exc_info.exception) + exc_job_instance = getattr(exc_info.exception, "query_job", None) + self.assertIs(exc_job_instance, job) + full_text = str(exc_info.exception) assert job.job_id in full_text assert "Query Job SQL Follows" in full_text @@ -5319,6 +5492,92 @@ def test_end(self): self.assertEqual(entry.end.strftime(_RFC3339_MICROS), self.END_RFC3339_MICROS) +class TestScriptStackFrame(unittest.TestCase, _Base): + def _make_one(self, resource): + from google.cloud.bigquery.job import ScriptStackFrame + + return ScriptStackFrame(resource) + + def test_procedure_id(self): + frame = self._make_one({"procedureId": "some-procedure"}) + self.assertEqual(frame.procedure_id, "some-procedure") + del frame._properties["procedureId"] + self.assertIsNone(frame.procedure_id) + + def test_start_line(self): + frame = self._make_one({"startLine": 5}) + self.assertEqual(frame.start_line, 5) + frame._properties["startLine"] = "5" + self.assertEqual(frame.start_line, 5) + + def test_start_column(self): + frame = self._make_one({"startColumn": 29}) + self.assertEqual(frame.start_column, 29) + frame._properties["startColumn"] = "29" + self.assertEqual(frame.start_column, 29) + + def test_end_line(self): + frame = self._make_one({"endLine": 9}) + 
self.assertEqual(frame.end_line, 9) + frame._properties["endLine"] = "9" + self.assertEqual(frame.end_line, 9) + + def test_end_column(self): + frame = self._make_one({"endColumn": 14}) + self.assertEqual(frame.end_column, 14) + frame._properties["endColumn"] = "14" + self.assertEqual(frame.end_column, 14) + + def test_text(self): + frame = self._make_one({"text": "QUERY TEXT"}) + self.assertEqual(frame.text, "QUERY TEXT") + + +class TestScriptStatistics(unittest.TestCase, _Base): + def _make_one(self, resource): + from google.cloud.bigquery.job import ScriptStatistics + + return ScriptStatistics(resource) + + def test_evalutation_kind(self): + stats = self._make_one({"evaluationKind": "EXPRESSION"}) + self.assertEqual(stats.evaluation_kind, "EXPRESSION") + self.assertEqual(stats.stack_frames, []) + + def test_stack_frames(self): + stats = self._make_one( + { + "stackFrames": [ + { + "procedureId": "some-procedure", + "startLine": 5, + "startColumn": 29, + "endLine": 9, + "endColumn": 14, + "text": "QUERY TEXT", + }, + {}, + ] + } + ) + stack_frames = stats.stack_frames + self.assertEqual(len(stack_frames), 2) + stack_frame = stack_frames[0] + self.assertEqual(stack_frame.procedure_id, "some-procedure") + self.assertEqual(stack_frame.start_line, 5) + self.assertEqual(stack_frame.start_column, 29) + self.assertEqual(stack_frame.end_line, 9) + self.assertEqual(stack_frame.end_column, 14) + self.assertEqual(stack_frame.text, "QUERY TEXT") + stack_frame = stack_frames[1] + self.assertIsNone(stack_frame.procedure_id) + self.assertIsNone(stack_frame.start_line) + self.assertIsNone(stack_frame.start_column) + self.assertIsNone(stack_frame.end_line) + self.assertIsNone(stack_frame.end_column) + self.assertIsNone(stack_frame.text) + + class TestTimelineEntry(unittest.TestCase, _Base): ELAPSED_MS = 101 ACTIVE_UNITS = 50 diff --git a/bigquery/tests/unit/test_magics.py b/bigquery/tests/unit/test_magics.py index fbea9bdd9050..6ff9819854a8 100644 --- 
a/bigquery/tests/unit/test_magics.py +++ b/bigquery/tests/unit/test_magics.py @@ -39,6 +39,7 @@ from google.cloud import bigquery_storage_v1beta1 except ImportError: # pragma: NO COVER bigquery_storage_v1beta1 = None +from google.cloud import bigquery from google.cloud.bigquery import job from google.cloud.bigquery import table from google.cloud.bigquery import magics @@ -336,6 +337,37 @@ def test__make_bqstorage_client_true_missing_gapic(missing_grpcio_lib): assert "grpcio" in str(exc_context.value) +def test__create_dataset_if_necessary_exists(): + project = "project_id" + dataset_id = "dataset_id" + dataset_reference = bigquery.dataset.DatasetReference(project, dataset_id) + dataset = bigquery.Dataset(dataset_reference) + client_patch = mock.patch( + "google.cloud.bigquery.magics.bigquery.Client", autospec=True + ) + with client_patch as client_mock: + client = client_mock() + client.project = project + client.get_dataset.result_value = dataset + magics._create_dataset_if_necessary(client, dataset_id) + client.create_dataset.assert_not_called() + + +def test__create_dataset_if_necessary_not_exist(): + project = "project_id" + dataset_id = "dataset_id" + client_patch = mock.patch( + "google.cloud.bigquery.magics.bigquery.Client", autospec=True + ) + with client_patch as client_mock: + client = client_mock() + client.location = "us" + client.project = project + client.get_dataset.side_effect = exceptions.NotFound("dataset not found") + magics._create_dataset_if_necessary(client, dataset_id) + client.create_dataset.assert_called_once() + + @pytest.mark.usefixtures("ipython_interactive") def test_extension_load(): ip = IPython.get_ipython() @@ -414,7 +446,7 @@ def test_bigquery_magic_with_legacy_sql(): with run_query_patch as run_query_mock: ip.run_cell_magic("bigquery", "--use_legacy_sql", "SELECT 17 AS num") - job_config_used = run_query_mock.call_args_list[0][0][-1] + job_config_used = run_query_mock.call_args_list[0][1]["job_config"] assert 
job_config_used.use_legacy_sql is True @@ -645,6 +677,165 @@ def test_bigquery_magic_without_bqstorage(monkeypatch): assert isinstance(return_value, pandas.DataFrame) +@pytest.mark.usefixtures("ipython_interactive") +def test_bigquery_magic_w_max_results_invalid(): + ip = IPython.get_ipython() + ip.extension_manager.load_extension("google.cloud.bigquery") + magics.context._project = None + + credentials_mock = mock.create_autospec( + google.auth.credentials.Credentials, instance=True + ) + default_patch = mock.patch( + "google.auth.default", return_value=(credentials_mock, "general-project") + ) + client_query_patch = mock.patch( + "google.cloud.bigquery.client.Client.query", autospec=True + ) + + sql = "SELECT 17 AS num" + + with pytest.raises(ValueError), default_patch, client_query_patch: + ip.run_cell_magic("bigquery", "--max_results=abc", sql) + + +@pytest.mark.usefixtures("ipython_interactive") +def test_bigquery_magic_w_max_results_valid_calls_queryjob_result(): + ip = IPython.get_ipython() + ip.extension_manager.load_extension("google.cloud.bigquery") + magics.context._project = None + + credentials_mock = mock.create_autospec( + google.auth.credentials.Credentials, instance=True + ) + default_patch = mock.patch( + "google.auth.default", return_value=(credentials_mock, "general-project") + ) + client_query_patch = mock.patch( + "google.cloud.bigquery.client.Client.query", autospec=True + ) + + sql = "SELECT 17 AS num" + + query_job_mock = mock.create_autospec( + google.cloud.bigquery.job.QueryJob, instance=True + ) + + with client_query_patch as client_query_mock, default_patch: + client_query_mock.return_value = query_job_mock + ip.run_cell_magic("bigquery", "--max_results=5", sql) + + query_job_mock.result.assert_called_with(max_results=5) + + +def test_bigquery_magic_w_table_id_invalid(): + ip = IPython.get_ipython() + ip.extension_manager.load_extension("google.cloud.bigquery") + magics.context._project = None + + credentials_mock = 
mock.create_autospec( + google.auth.credentials.Credentials, instance=True + ) + default_patch = mock.patch( + "google.auth.default", return_value=(credentials_mock, "general-project") + ) + + list_rows_patch = mock.patch( + "google.cloud.bigquery.magics.bigquery.Client.list_rows", + autospec=True, + side_effect=exceptions.BadRequest("Not a valid table ID"), + ) + + table_id = "not-a-real-table" + + with list_rows_patch, default_patch, io.capture_output() as captured_io: + ip.run_cell_magic("bigquery", "df", table_id) + + output = captured_io.stderr + assert "Could not save output to variable" in output + assert "400 Not a valid table ID" in output + assert "Traceback (most recent call last)" not in output + + +@pytest.mark.usefixtures("ipython_interactive") +def test_bigquery_magic_w_table_id_and_destination_var(): + ip = IPython.get_ipython() + ip.extension_manager.load_extension("google.cloud.bigquery") + magics.context._project = None + + credentials_mock = mock.create_autospec( + google.auth.credentials.Credentials, instance=True + ) + default_patch = mock.patch( + "google.auth.default", return_value=(credentials_mock, "general-project") + ) + + row_iterator_mock = mock.create_autospec( + google.cloud.bigquery.table.RowIterator, instance=True + ) + + client_patch = mock.patch( + "google.cloud.bigquery.magics.bigquery.Client", autospec=True + ) + + table_id = "bigquery-public-data.samples.shakespeare" + result = pandas.DataFrame([17], columns=["num"]) + + with client_patch as client_mock, default_patch: + client_mock().list_rows.return_value = row_iterator_mock + row_iterator_mock.to_dataframe.return_value = result + + ip.run_cell_magic("bigquery", "df", table_id) + + assert "df" in ip.user_ns + df = ip.user_ns["df"] + + assert isinstance(df, pandas.DataFrame) + + +@pytest.mark.usefixtures("ipython_interactive") +def test_bigquery_magic_w_table_id_and_bqstorage_client(): + ip = IPython.get_ipython() + ip.extension_manager.load_extension("google.cloud.bigquery") 
+ magics.context._project = None + + credentials_mock = mock.create_autospec( + google.auth.credentials.Credentials, instance=True + ) + default_patch = mock.patch( + "google.auth.default", return_value=(credentials_mock, "general-project") + ) + + row_iterator_mock = mock.create_autospec( + google.cloud.bigquery.table.RowIterator, instance=True + ) + + client_patch = mock.patch( + "google.cloud.bigquery.magics.bigquery.Client", autospec=True + ) + + bqstorage_mock = mock.create_autospec( + bigquery_storage_v1beta1.BigQueryStorageClient + ) + bqstorage_instance_mock = mock.create_autospec( + bigquery_storage_v1beta1.BigQueryStorageClient, instance=True + ) + bqstorage_mock.return_value = bqstorage_instance_mock + bqstorage_client_patch = mock.patch( + "google.cloud.bigquery_storage_v1beta1.BigQueryStorageClient", bqstorage_mock + ) + + table_id = "bigquery-public-data.samples.shakespeare" + + with default_patch, client_patch as client_mock, bqstorage_client_patch: + client_mock().list_rows.return_value = row_iterator_mock + + ip.run_cell_magic("bigquery", "--use_bqstorage_api --max_results=5", table_id) + row_iterator_mock.to_dataframe.assert_called_once_with( + bqstorage_client=bqstorage_instance_mock + ) + + @pytest.mark.usefixtures("ipython_interactive") def test_bigquery_magic_dryrun_option_sets_job_config(): ip = IPython.get_ipython() @@ -662,7 +853,7 @@ def test_bigquery_magic_dryrun_option_sets_job_config(): with run_query_patch as run_query_mock: ip.run_cell_magic("bigquery", "--dry_run", sql) - job_config_used = run_query_mock.call_args_list[0][0][-1] + job_config_used = run_query_mock.call_args_list[0][1]["job_config"] assert job_config_used.dry_run is True @@ -743,6 +934,37 @@ def test_bigquery_magic_dryrun_option_saves_query_job_to_variable(): assert isinstance(q_job, job.QueryJob) +@pytest.mark.usefixtures("ipython_interactive") +def test_bigquery_magic_saves_query_job_to_variable_on_error(): + ip = IPython.get_ipython() + 
ip.extension_manager.load_extension("google.cloud.bigquery") + magics.context.credentials = mock.create_autospec( + google.auth.credentials.Credentials, instance=True + ) + + client_query_patch = mock.patch( + "google.cloud.bigquery.client.Client.query", autospec=True + ) + + query_job = mock.create_autospec(job.QueryJob, instance=True) + exception = Exception("Unexpected SELECT") + exception.query_job = query_job + query_job.result.side_effect = exception + + sql = "SELECT SELECT 17 AS num" + + assert "result" not in ip.user_ns + + with client_query_patch as client_query_mock: + client_query_mock.return_value = query_job + return_value = ip.run_cell_magic("bigquery", "result", sql) + + assert return_value is None + assert "result" in ip.user_ns + result = ip.user_ns["result"] + assert isinstance(result, job.QueryJob) + + @pytest.mark.usefixtures("ipython_interactive") def test_bigquery_magic_w_maximum_bytes_billed_invalid(): ip = IPython.get_ipython() @@ -924,6 +1146,7 @@ def test_bigquery_magic_with_string_params(): run_query_mock.return_value = query_job_mock ip.run_cell_magic("bigquery", 'params_string_df --params {"num":17}', sql) + run_query_mock.assert_called_once_with(mock.ANY, sql.format(num=17), mock.ANY) assert "params_string_df" in ip.user_ns # verify that the variable exists @@ -959,6 +1182,7 @@ def test_bigquery_magic_with_dict_params(): # Insert dictionary into user namespace so that it can be expanded ip.user_ns["params"] = params ip.run_cell_magic("bigquery", "params_dict_df --params $params", sql) + run_query_mock.assert_called_once_with(mock.ANY, sql.format(num=17), mock.ANY) assert "params_dict_df" in ip.user_ns # verify that the variable exists @@ -1007,3 +1231,62 @@ def test_bigquery_magic_omits_tracebacks_from_error_message(): assert "400 Syntax error in SQL query" in output assert "Traceback (most recent call last)" not in output assert "Syntax error" not in captured_io.stdout + + +@pytest.mark.usefixtures("ipython_interactive") +def 
test_bigquery_magic_w_destination_table_invalid_format(): + ip = IPython.get_ipython() + ip.extension_manager.load_extension("google.cloud.bigquery") + magics.context._project = None + + credentials_mock = mock.create_autospec( + google.auth.credentials.Credentials, instance=True + ) + default_patch = mock.patch( + "google.auth.default", return_value=(credentials_mock, "general-project") + ) + + client_patch = mock.patch( + "google.cloud.bigquery.magics.bigquery.Client", autospec=True + ) + + with client_patch, default_patch, pytest.raises(ValueError) as exc_context: + ip.run_cell_magic( + "bigquery", "--destination_table dataset", "SELECT foo FROM WHERE LIMIT bar" + ) + error_msg = str(exc_context.value) + assert ( + "--destination_table should be in a " + ". format." in error_msg + ) + + +@pytest.mark.usefixtures("ipython_interactive") +def test_bigquery_magic_w_destination_table(): + ip = IPython.get_ipython() + ip.extension_manager.load_extension("google.cloud.bigquery") + magics.context.credentials = mock.create_autospec( + google.auth.credentials.Credentials, instance=True + ) + + create_dataset_if_necessary_patch = mock.patch( + "google.cloud.bigquery.magics._create_dataset_if_necessary", autospec=True + ) + + run_query_patch = mock.patch( + "google.cloud.bigquery.magics._run_query", autospec=True + ) + + with create_dataset_if_necessary_patch, run_query_patch as run_query_mock: + ip.run_cell_magic( + "bigquery", + "--destination_table dataset_id.table_id", + "SELECT foo FROM WHERE LIMIT bar", + ) + + job_config_used = run_query_mock.call_args_list[0][1]["job_config"] + assert job_config_used.allow_large_results is True + assert job_config_used.create_disposition == "CREATE_IF_NEEDED" + assert job_config_used.write_disposition == "WRITE_TRUNCATE" + assert job_config_used.destination.dataset_id == "dataset_id" + assert job_config_used.destination.table_id == "table_id" diff --git a/bigquery/tests/unit/test_schema.py b/bigquery/tests/unit/test_schema.py index 
862d8a823e62..fc8a41c68c46 100644 --- a/bigquery/tests/unit/test_schema.py +++ b/bigquery/tests/unit/test_schema.py @@ -568,3 +568,69 @@ def test_w_subfields(self): ], }, ) + + +class Test_to_schema_fields(unittest.TestCase): + @staticmethod + def _call_fut(schema): + from google.cloud.bigquery.schema import _to_schema_fields + + return _to_schema_fields(schema) + + def test_invalid_type(self): + schema = [ + ("full_name", "STRING", "REQUIRED"), + ("address", "STRING", "REQUIRED"), + ] + with self.assertRaises(ValueError): + self._call_fut(schema) + + def test_schema_fields_sequence(self): + from google.cloud.bigquery.schema import SchemaField + + schema = [ + SchemaField("full_name", "STRING", mode="REQUIRED"), + SchemaField("age", "INT64", mode="NULLABLE"), + ] + result = self._call_fut(schema) + self.assertEqual(result, schema) + + def test_invalid_mapping_representation(self): + schema = [ + {"name": "full_name", "type": "STRING", "mode": "REQUIRED"}, + {"name": "address", "typeooo": "STRING", "mode": "REQUIRED"}, + ] + with self.assertRaises(Exception): + self._call_fut(schema) + + def test_valid_mapping_representation(self): + from google.cloud.bigquery.schema import SchemaField + + schema = [ + {"name": "full_name", "type": "STRING", "mode": "REQUIRED"}, + { + "name": "residence", + "type": "STRUCT", + "mode": "NULLABLE", + "fields": [ + {"name": "foo", "type": "DATE", "mode": "NULLABLE"}, + {"name": "bar", "type": "BYTES", "mode": "REQUIRED"}, + ], + }, + ] + + expected_schema = [ + SchemaField("full_name", "STRING", mode="REQUIRED"), + SchemaField( + "residence", + "STRUCT", + mode="NULLABLE", + fields=[ + SchemaField("foo", "DATE", mode="NULLABLE"), + SchemaField("bar", "BYTES", mode="REQUIRED"), + ], + ), + ] + + result = self._call_fut(schema) + self.assertEqual(result, expected_schema) diff --git a/bigquery/tests/unit/test_table.py b/bigquery/tests/unit/test_table.py index 8ba7fee892e5..97a7b4ae745e 100644 --- a/bigquery/tests/unit/test_table.py +++ 
b/bigquery/tests/unit/test_table.py @@ -71,7 +71,7 @@ def _verifySchema(self, schema, resource): class TestEncryptionConfiguration(unittest.TestCase): - KMS_KEY_NAME = "projects/1/locations/global/keyRings/1/cryptoKeys/1" + KMS_KEY_NAME = "projects/1/locations/us/keyRings/1/cryptoKeys/1" @staticmethod def _get_target_class(): @@ -90,78 +90,6 @@ def test_ctor_with_key(self): encryption_config = self._make_one(kms_key_name=self.KMS_KEY_NAME) self.assertEqual(encryption_config.kms_key_name, self.KMS_KEY_NAME) - def test_kms_key_name_setter(self): - encryption_config = self._make_one() - self.assertIsNone(encryption_config.kms_key_name) - encryption_config.kms_key_name = self.KMS_KEY_NAME - self.assertEqual(encryption_config.kms_key_name, self.KMS_KEY_NAME) - encryption_config.kms_key_name = None - self.assertIsNone(encryption_config.kms_key_name) - - def test_from_api_repr(self): - RESOURCE = {"kmsKeyName": self.KMS_KEY_NAME} - klass = self._get_target_class() - encryption_config = klass.from_api_repr(RESOURCE) - self.assertEqual(encryption_config.kms_key_name, self.KMS_KEY_NAME) - - def test_to_api_repr(self): - encryption_config = self._make_one(kms_key_name=self.KMS_KEY_NAME) - resource = encryption_config.to_api_repr() - self.assertEqual(resource, {"kmsKeyName": self.KMS_KEY_NAME}) - - def test___eq___wrong_type(self): - encryption_config = self._make_one() - other = object() - self.assertNotEqual(encryption_config, other) - self.assertEqual(encryption_config, mock.ANY) - - def test___eq___kms_key_name_mismatch(self): - encryption_config = self._make_one() - other = self._make_one(self.KMS_KEY_NAME) - self.assertNotEqual(encryption_config, other) - - def test___eq___hit(self): - encryption_config = self._make_one(self.KMS_KEY_NAME) - other = self._make_one(self.KMS_KEY_NAME) - self.assertEqual(encryption_config, other) - - def test___ne___wrong_type(self): - encryption_config = self._make_one() - other = object() - self.assertNotEqual(encryption_config, other) - 
self.assertEqual(encryption_config, mock.ANY) - - def test___ne___same_value(self): - encryption_config1 = self._make_one(self.KMS_KEY_NAME) - encryption_config2 = self._make_one(self.KMS_KEY_NAME) - # unittest ``assertEqual`` uses ``==`` not ``!=``. - comparison_val = encryption_config1 != encryption_config2 - self.assertFalse(comparison_val) - - def test___ne___different_values(self): - encryption_config1 = self._make_one() - encryption_config2 = self._make_one(self.KMS_KEY_NAME) - self.assertNotEqual(encryption_config1, encryption_config2) - - def test___hash__set_equality(self): - encryption_config1 = self._make_one(self.KMS_KEY_NAME) - encryption_config2 = self._make_one(self.KMS_KEY_NAME) - set_one = {encryption_config1, encryption_config2} - set_two = {encryption_config1, encryption_config2} - self.assertEqual(set_one, set_two) - - def test___hash__not_equals(self): - encryption_config1 = self._make_one() - encryption_config2 = self._make_one(self.KMS_KEY_NAME) - set_one = {encryption_config1} - set_two = {encryption_config2} - self.assertNotEqual(set_one, set_two) - - def test___repr__(self): - encryption_config = self._make_one(self.KMS_KEY_NAME) - expected = "EncryptionConfiguration({})".format(self.KMS_KEY_NAME) - self.assertEqual(repr(encryption_config), expected) - class TestTableReference(unittest.TestCase): @staticmethod @@ -215,11 +143,23 @@ def test_from_string(self): self.assertEqual(got.dataset_id, "string_dataset") self.assertEqual(got.table_id, "string_table") + def test_from_string_w_prefix(self): + cls = self._get_target_class() + got = cls.from_string("google.com:string-project.string_dataset.string_table") + self.assertEqual(got.project, "google.com:string-project") + self.assertEqual(got.dataset_id, "string_dataset") + self.assertEqual(got.table_id, "string_table") + def test_from_string_legacy_string(self): cls = self._get_target_class() with self.assertRaises(ValueError): cls.from_string("string-project:string_dataset.string_table") + 
def test_from_string_w_incorrect_prefix(self): + cls = self._get_target_class() + with self.assertRaises(ValueError): + cls.from_string("google.com.string-project.string_dataset.string_table") + def test_from_string_not_fully_qualified(self): cls = self._get_target_class() with self.assertRaises(ValueError): @@ -327,7 +267,7 @@ class TestTable(unittest.TestCase, _SchemaBase): PROJECT = "prahj-ekt" DS_ID = "dataset-name" TABLE_NAME = "table-name" - KMS_KEY_NAME = "projects/1/locations/global/keyRings/1/cryptoKeys/1" + KMS_KEY_NAME = "projects/1/locations/us/keyRings/1/cryptoKeys/1" @staticmethod def _get_target_class(): @@ -510,7 +450,7 @@ def test_ctor(self): self.assertIsNone(table.clustering_fields) def test_ctor_w_schema(self): - from google.cloud.bigquery.table import SchemaField + from google.cloud.bigquery.schema import SchemaField dataset = DatasetReference(self.PROJECT, self.DS_ID) table_ref = dataset.table(self.TABLE_NAME) @@ -616,7 +556,7 @@ def test_num_rows_getter(self): with self.assertRaises(ValueError): getattr(table, "num_rows") - def test_schema_setter_non_list(self): + def test_schema_setter_non_sequence(self): dataset = DatasetReference(self.PROJECT, self.DS_ID) table_ref = dataset.table(self.TABLE_NAME) table = self._make_one(table_ref) @@ -624,7 +564,7 @@ def test_schema_setter_non_list(self): table.schema = object() def test_schema_setter_invalid_field(self): - from google.cloud.bigquery.table import SchemaField + from google.cloud.bigquery.schema import SchemaField dataset = DatasetReference(self.PROJECT, self.DS_ID) table_ref = dataset.table(self.TABLE_NAME) @@ -633,8 +573,8 @@ def test_schema_setter_invalid_field(self): with self.assertRaises(ValueError): table.schema = [full_name, object()] - def test_schema_setter(self): - from google.cloud.bigquery.table import SchemaField + def test_schema_setter_valid_fields(self): + from google.cloud.bigquery.schema import SchemaField dataset = DatasetReference(self.PROJECT, self.DS_ID) table_ref = 
dataset.table(self.TABLE_NAME) @@ -644,6 +584,48 @@ def test_schema_setter(self): table.schema = [full_name, age] self.assertEqual(table.schema, [full_name, age]) + def test_schema_setter_invalid_mapping_representation(self): + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) + table = self._make_one(table_ref) + full_name = {"name": "full_name", "type": "STRING", "mode": "REQUIRED"} + invalid_field = {"name": "full_name", "typeooo": "STRING", "mode": "REQUIRED"} + with self.assertRaises(Exception): + table.schema = [full_name, invalid_field] + + def test_schema_setter_valid_mapping_representation(self): + from google.cloud.bigquery.schema import SchemaField + + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) + table = self._make_one(table_ref) + full_name = {"name": "full_name", "type": "STRING", "mode": "REQUIRED"} + job_status = { + "name": "is_employed", + "type": "STRUCT", + "mode": "NULLABLE", + "fields": [ + {"name": "foo", "type": "DATE", "mode": "NULLABLE"}, + {"name": "bar", "type": "BYTES", "mode": "REQUIRED"}, + ], + } + + table.schema = [full_name, job_status] + + expected_schema = [ + SchemaField("full_name", "STRING", mode="REQUIRED"), + SchemaField( + "is_employed", + "STRUCT", + mode="NULLABLE", + fields=[ + SchemaField("foo", "DATE", mode="NULLABLE"), + SchemaField("bar", "BYTES", mode="REQUIRED"), + ], + ), + ] + self.assertEqual(table.schema, expected_schema) + def test_props_set_by_server(self): import datetime from google.cloud._helpers import UTC @@ -916,6 +898,40 @@ def test__build_resource_w_custom_field_not_in__properties(self): with self.assertRaises(ValueError): table._build_resource(["bad"]) + def test_range_partitioning(self): + from google.cloud.bigquery.table import RangePartitioning + from google.cloud.bigquery.table import PartitionRange + + table = self._make_one("proj.dset.tbl") + assert table.range_partitioning is None + + 
table.range_partitioning = RangePartitioning( + field="col1", range_=PartitionRange(start=-512, end=1024, interval=128) + ) + assert table.range_partitioning.field == "col1" + assert table.range_partitioning.range_.start == -512 + assert table.range_partitioning.range_.end == 1024 + assert table.range_partitioning.range_.interval == 128 + + table.range_partitioning = None + assert table.range_partitioning is None + + def test_range_partitioning_w_wrong_type(self): + object_under_test = self._make_one("proj.dset.tbl") + with pytest.raises(ValueError, match="RangePartitioning"): + object_under_test.range_partitioning = object() + + def test_require_partitioning_filter(self): + table = self._make_one("proj.dset.tbl") + assert table.require_partition_filter is None + table.require_partition_filter = True + assert table.require_partition_filter + table.require_partition_filter = False + assert table.require_partition_filter is not None + assert not table.require_partition_filter + table.require_partition_filter = None + assert table.require_partition_filter is None + def test_time_partitioning_getter(self): from google.cloud.bigquery.table import TimePartitioning from google.cloud.bigquery.table import TimePartitioningType @@ -934,7 +950,12 @@ def test_time_partitioning_getter(self): self.assertEqual(table.time_partitioning.type_, TimePartitioningType.DAY) self.assertEqual(table.time_partitioning.field, "col1") self.assertEqual(table.time_partitioning.expiration_ms, 123456) - self.assertFalse(table.time_partitioning.require_partition_filter) + + with warnings.catch_warnings(record=True) as warned: + self.assertFalse(table.time_partitioning.require_partition_filter) + + assert len(warned) == 1 + self.assertIs(warned[0].category, PendingDeprecationWarning) def test_time_partitioning_getter_w_none(self): dataset = DatasetReference(self.PROJECT, self.DS_ID) @@ -962,7 +983,12 @@ def test_time_partitioning_getter_w_empty(self): self.assertIsNone(table.time_partitioning.type_) 
self.assertIsNone(table.time_partitioning.field) self.assertIsNone(table.time_partitioning.expiration_ms) - self.assertIsNone(table.time_partitioning.require_partition_filter) + + with warnings.catch_warnings(record=True) as warned: + self.assertIsNone(table.time_partitioning.require_partition_filter) + + for warning in warned: + self.assertIs(warning.category, PendingDeprecationWarning) def test_time_partitioning_setter(self): from google.cloud.bigquery.table import TimePartitioning @@ -1106,6 +1132,10 @@ def test_clustering_fields_setter_w_none_noop(self): self.assertFalse("clustering" in table._properties) def test_encryption_configuration_setter(self): + # Previously, the EncryptionConfiguration class was in the table module, not the + # encryption_configuration module. It was moved to support models encryption. + # This test import from the table module to ensure that the previous location + # continues to function as an alias. from google.cloud.bigquery.table import EncryptionConfiguration dataset = DatasetReference(self.PROJECT, self.DS_ID) @@ -1157,7 +1187,8 @@ def test__row_from_mapping_wo_schema(self): self.assertEqual(exc.exception.args, (_TABLE_HAS_NO_SCHEMA,)) def test__row_from_mapping_w_invalid_schema(self): - from google.cloud.bigquery.table import Table, SchemaField + from google.cloud.bigquery.schema import SchemaField + from google.cloud.bigquery.table import Table MAPPING = { "full_name": "Phred Phlyntstone", @@ -1179,7 +1210,8 @@ def test__row_from_mapping_w_invalid_schema(self): self.assertIn("Unknown field mode: BOGUS", str(exc.exception)) def test__row_from_mapping_w_schema(self): - from google.cloud.bigquery.table import Table, SchemaField + from google.cloud.bigquery.schema import SchemaField + from google.cloud.bigquery.table import Table MAPPING = { "full_name": "Phred Phlyntstone", @@ -1509,8 +1541,24 @@ def test_constructor_with_table(self): self.assertIs(iterator._table, table) self.assertEqual(iterator.total_rows, 100) + def 
test_constructor_with_dict_schema(self): + from google.cloud.bigquery.schema import SchemaField + + schema = [ + {"name": "full_name", "type": "STRING", "mode": "REQUIRED"}, + {"name": "age", "type": "INT64", "mode": "NULLABLE"}, + ] + + iterator = self._make_one(schema=schema) + + expected_schema = [ + SchemaField("full_name", "STRING", mode="REQUIRED"), + SchemaField("age", "INT64", mode="NULLABLE"), + ] + self.assertEqual(iterator.schema, expected_schema) + def test_iterate(self): - from google.cloud.bigquery.table import SchemaField + from google.cloud.bigquery.schema import SchemaField schema = [ SchemaField("name", "STRING", mode="REQUIRED"), @@ -1541,7 +1589,7 @@ def test_iterate(self): api_request.assert_called_once_with(method="GET", path=path, query_params={}) def test_page_size(self): - from google.cloud.bigquery.table import SchemaField + from google.cloud.bigquery.schema import SchemaField schema = [ SchemaField("name", "STRING", mode="REQUIRED"), @@ -1567,7 +1615,7 @@ def test_page_size(self): @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_to_arrow(self): - from google.cloud.bigquery.table import SchemaField + from google.cloud.bigquery.schema import SchemaField schema = [ SchemaField("name", "STRING", mode="REQUIRED"), @@ -1649,7 +1697,7 @@ def test_to_arrow(self): @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_to_arrow_w_nulls(self): - from google.cloud.bigquery.table import SchemaField + from google.cloud.bigquery.schema import SchemaField schema = [SchemaField("name", "STRING"), SchemaField("age", "INTEGER")] rows = [ @@ -1682,7 +1730,7 @@ def test_to_arrow_w_nulls(self): @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_to_arrow_w_unknown_type(self): - from google.cloud.bigquery.table import SchemaField + from google.cloud.bigquery.schema import SchemaField schema = [ SchemaField("name", "STRING", mode="REQUIRED"), @@ -1720,7 +1768,7 @@ def test_to_arrow_w_unknown_type(self): 
@unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_to_arrow_w_empty_table(self): - from google.cloud.bigquery.table import SchemaField + from google.cloud.bigquery.schema import SchemaField schema = [ SchemaField("name", "STRING", mode="REQUIRED"), @@ -1882,7 +1930,7 @@ def test_to_arrow_w_bqstorage_no_streams(self): @mock.patch("tqdm.tqdm_notebook") @mock.patch("tqdm.tqdm") def test_to_arrow_progress_bar(self, tqdm_mock, tqdm_notebook_mock, tqdm_gui_mock): - from google.cloud.bigquery.table import SchemaField + from google.cloud.bigquery.schema import SchemaField schema = [ SchemaField("name", "STRING", mode="REQUIRED"), @@ -1925,7 +1973,7 @@ def test_to_arrow_w_pyarrow_none(self): @unittest.skipIf(pandas is None, "Requires `pandas`") def test_to_dataframe(self): - from google.cloud.bigquery.table import SchemaField + from google.cloud.bigquery.schema import SchemaField schema = [ SchemaField("name", "STRING", mode="REQUIRED"), @@ -1957,7 +2005,7 @@ def test_to_dataframe(self): def test_to_dataframe_progress_bar( self, tqdm_mock, tqdm_notebook_mock, tqdm_gui_mock ): - from google.cloud.bigquery.table import SchemaField + from google.cloud.bigquery.schema import SchemaField schema = [ SchemaField("name", "STRING", mode="REQUIRED"), @@ -1990,7 +2038,7 @@ def test_to_dataframe_progress_bar( @unittest.skipIf(pandas is None, "Requires `pandas`") @mock.patch("google.cloud.bigquery.table.tqdm", new=None) def test_to_dataframe_no_tqdm_no_progress_bar(self): - from google.cloud.bigquery.table import SchemaField + from google.cloud.bigquery.schema import SchemaField schema = [ SchemaField("name", "STRING", mode="REQUIRED"), @@ -2015,7 +2063,7 @@ def test_to_dataframe_no_tqdm_no_progress_bar(self): @unittest.skipIf(pandas is None, "Requires `pandas`") @mock.patch("google.cloud.bigquery.table.tqdm", new=None) def test_to_dataframe_no_tqdm(self): - from google.cloud.bigquery.table import SchemaField + from google.cloud.bigquery.schema import SchemaField schema = 
[ SchemaField("name", "STRING", mode="REQUIRED"), @@ -2048,7 +2096,7 @@ def test_to_dataframe_no_tqdm(self): @mock.patch("tqdm.tqdm_notebook", new=None) # will raise TypeError on call @mock.patch("tqdm.tqdm", new=None) # will raise TypeError on call def test_to_dataframe_tqdm_error(self): - from google.cloud.bigquery.table import SchemaField + from google.cloud.bigquery.schema import SchemaField schema = [ SchemaField("name", "STRING", mode="REQUIRED"), @@ -2078,7 +2126,7 @@ def test_to_dataframe_tqdm_error(self): @unittest.skipIf(pandas is None, "Requires `pandas`") def test_to_dataframe_w_empty_results(self): - from google.cloud.bigquery.table import SchemaField + from google.cloud.bigquery.schema import SchemaField schema = [ SchemaField("name", "STRING", mode="REQUIRED"), @@ -2113,7 +2161,7 @@ def test_to_dataframe_logs_tabledata_list(self): @unittest.skipIf(pandas is None, "Requires `pandas`") def test_to_dataframe_w_various_types_nullable(self): import datetime - from google.cloud.bigquery.table import SchemaField + from google.cloud.bigquery.schema import SchemaField schema = [ SchemaField("start_timestamp", "TIMESTAMP"), @@ -2153,7 +2201,7 @@ def test_to_dataframe_w_various_types_nullable(self): @unittest.skipIf(pandas is None, "Requires `pandas`") def test_to_dataframe_column_dtypes(self): - from google.cloud.bigquery.table import SchemaField + from google.cloud.bigquery.schema import SchemaField schema = [ SchemaField("start_timestamp", "TIMESTAMP"), @@ -2191,7 +2239,7 @@ def test_to_dataframe_column_dtypes(self): @mock.patch("google.cloud.bigquery.table.pandas", new=None) def test_to_dataframe_error_if_pandas_is_none(self): - from google.cloud.bigquery.table import SchemaField + from google.cloud.bigquery.schema import SchemaField schema = [ SchemaField("name", "STRING", mode="REQUIRED"), @@ -2208,6 +2256,42 @@ def test_to_dataframe_error_if_pandas_is_none(self): with self.assertRaises(ValueError): row_iterator.to_dataframe() + @unittest.skipIf(pandas is 
None, "Requires `pandas`") + def test_to_dataframe_max_results_w_bqstorage_warning(self): + from google.cloud.bigquery.schema import SchemaField + + schema = [ + SchemaField("name", "STRING", mode="REQUIRED"), + SchemaField("age", "INTEGER", mode="REQUIRED"), + ] + rows = [ + {"f": [{"v": "Phred Phlyntstone"}, {"v": "32"}]}, + {"f": [{"v": "Bharney Rhubble"}, {"v": "33"}]}, + ] + path = "/foo" + api_request = mock.Mock(return_value={"rows": rows}) + bqstorage_client = mock.Mock() + + row_iterator = self._make_one( + client=_mock_client(), + api_request=api_request, + path=path, + schema=schema, + max_results=42, + ) + + with warnings.catch_warnings(record=True) as warned: + row_iterator.to_dataframe(bqstorage_client=bqstorage_client) + + matches = [ + warning + for warning in warned + if warning.category is UserWarning + and "cannot use bqstorage_client" in str(warning).lower() + and "tabledata.list" in str(warning) + ] + self.assertEqual(len(matches), 1, msg="User warning was not emitted.") + @unittest.skipIf(pandas is None, "Requires `pandas`") @unittest.skipIf( bigquery_storage_v1beta1 is None, "Requires `google-cloud-bigquery-storage`" @@ -2776,6 +2860,96 @@ def test_to_dataframe_w_bqstorage_snapshot(self): row_iterator.to_dataframe(bqstorage_client) +class TestPartitionRange(unittest.TestCase): + def _get_target_class(self): + from google.cloud.bigquery.table import PartitionRange + + return PartitionRange + + def _make_one(self, *args, **kw): + return self._get_target_class()(*args, **kw) + + def test_constructor_defaults(self): + object_under_test = self._make_one() + assert object_under_test.start is None + assert object_under_test.end is None + assert object_under_test.interval is None + + def test_constructor_w_properties(self): + object_under_test = self._make_one(start=1, end=10, interval=2) + assert object_under_test.start == 1 + assert object_under_test.end == 10 + assert object_under_test.interval == 2 + + def test_constructor_w_resource(self): + 
object_under_test = self._make_one( + _properties={"start": -1234567890, "end": 1234567890, "interval": 1000000} + ) + assert object_under_test.start == -1234567890 + assert object_under_test.end == 1234567890 + assert object_under_test.interval == 1000000 + + def test_repr(self): + object_under_test = self._make_one(start=1, end=10, interval=2) + assert repr(object_under_test) == "PartitionRange(end=10, interval=2, start=1)" + + +class TestRangePartitioning(unittest.TestCase): + def _get_target_class(self): + from google.cloud.bigquery.table import RangePartitioning + + return RangePartitioning + + def _make_one(self, *args, **kw): + return self._get_target_class()(*args, **kw) + + def test_constructor_defaults(self): + object_under_test = self._make_one() + assert object_under_test.field is None + assert object_under_test.range_.start is None + assert object_under_test.range_.end is None + assert object_under_test.range_.interval is None + + def test_constructor_w_properties(self): + from google.cloud.bigquery.table import PartitionRange + + object_under_test = self._make_one( + range_=PartitionRange(start=1, end=10, interval=2), field="integer_col" + ) + assert object_under_test.field == "integer_col" + assert object_under_test.range_.start == 1 + assert object_under_test.range_.end == 10 + assert object_under_test.range_.interval == 2 + + def test_constructor_w_resource(self): + object_under_test = self._make_one( + _properties={ + "field": "some_column", + "range": {"start": -1234567890, "end": 1234567890, "interval": 1000000}, + } + ) + assert object_under_test.field == "some_column" + assert object_under_test.range_.start == -1234567890 + assert object_under_test.range_.end == 1234567890 + assert object_under_test.range_.interval == 1000000 + + def test_range_w_wrong_type(self): + object_under_test = self._make_one() + with pytest.raises(ValueError, match="PartitionRange"): + object_under_test.range_ = object() + + def test_repr(self): + from 
google.cloud.bigquery.table import PartitionRange + + object_under_test = self._make_one( + range_=PartitionRange(start=1, end=10, interval=2), field="integer_col" + ) + assert ( + repr(object_under_test) + == "RangePartitioning(field='integer_col', range_=PartitionRange(end=10, interval=2, start=1))" + ) + + class TestTimePartitioning(unittest.TestCase): def _get_target_class(self): from google.cloud.bigquery.table import TimePartitioning @@ -2787,26 +2961,32 @@ def _make_one(self, *args, **kw): def test_constructor_defaults(self): time_partitioning = self._make_one() - self.assertEqual(time_partitioning.type_, "DAY") self.assertIsNone(time_partitioning.field) self.assertIsNone(time_partitioning.expiration_ms) - self.assertIsNone(time_partitioning.require_partition_filter) def test_constructor_explicit(self): from google.cloud.bigquery.table import TimePartitioningType time_partitioning = self._make_one( - type_=TimePartitioningType.DAY, - field="name", - expiration_ms=10000, - require_partition_filter=True, + type_=TimePartitioningType.DAY, field="name", expiration_ms=10000 ) self.assertEqual(time_partitioning.type_, "DAY") self.assertEqual(time_partitioning.field, "name") self.assertEqual(time_partitioning.expiration_ms, 10000) - self.assertTrue(time_partitioning.require_partition_filter) + + def test_require_partition_filter_warns_deprecation(self): + object_under_test = self._make_one() + + with warnings.catch_warnings(record=True) as warned: + assert object_under_test.require_partition_filter is None + object_under_test.require_partition_filter = True + assert object_under_test.require_partition_filter + + assert len(warned) == 3 + for warning in warned: + self.assertIs(warning.category, PendingDeprecationWarning) def test_from_api_repr_empty(self): klass = self._get_target_class() @@ -2820,7 +3000,6 @@ def test_from_api_repr_empty(self): self.assertIsNone(time_partitioning.type_) self.assertIsNone(time_partitioning.field) 
self.assertIsNone(time_partitioning.expiration_ms) - self.assertIsNone(time_partitioning.require_partition_filter) def test_from_api_repr_minimal(self): from google.cloud.bigquery.table import TimePartitioningType @@ -2832,7 +3011,6 @@ def test_from_api_repr_minimal(self): self.assertEqual(time_partitioning.type_, TimePartitioningType.DAY) self.assertIsNone(time_partitioning.field) self.assertIsNone(time_partitioning.expiration_ms) - self.assertIsNone(time_partitioning.require_partition_filter) def test_from_api_repr_doesnt_override_type(self): klass = self._get_target_class() @@ -2855,7 +3033,11 @@ def test_from_api_repr_explicit(self): self.assertEqual(time_partitioning.type_, TimePartitioningType.DAY) self.assertEqual(time_partitioning.field, "name") self.assertEqual(time_partitioning.expiration_ms, 10000) - self.assertTrue(time_partitioning.require_partition_filter) + + with warnings.catch_warnings(record=True) as warned: + self.assertTrue(time_partitioning.require_partition_filter) + + self.assertIs(warned[0].category, PendingDeprecationWarning) def test_to_api_repr_defaults(self): time_partitioning = self._make_one() @@ -2866,12 +3048,14 @@ def test_to_api_repr_explicit(self): from google.cloud.bigquery.table import TimePartitioningType time_partitioning = self._make_one( - type_=TimePartitioningType.DAY, - field="name", - expiration_ms=10000, - require_partition_filter=True, + type_=TimePartitioningType.DAY, field="name", expiration_ms=10000 ) + with warnings.catch_warnings(record=True) as warned: + time_partitioning.require_partition_filter = True + + self.assertIs(warned[0].category, PendingDeprecationWarning) + expected = { "type": "DAY", "field": "name", @@ -2902,21 +3086,21 @@ def test___eq___expiration_ms_mismatch(self): self.assertNotEqual(time_partitioning, other) def test___eq___require_partition_filter_mismatch(self): - time_partitioning = self._make_one( - field="foo", expiration_ms=100000, require_partition_filter=True - ) - other = 
self._make_one( - field="foo", expiration_ms=100000, require_partition_filter=False - ) + time_partitioning = self._make_one(field="foo", expiration_ms=100000) + other = self._make_one(field="foo", expiration_ms=100000) + with warnings.catch_warnings(record=True) as warned: + time_partitioning.require_partition_filter = True + other.require_partition_filter = False + + assert len(warned) == 2 + for warning in warned: + self.assertIs(warning.category, PendingDeprecationWarning) + self.assertNotEqual(time_partitioning, other) def test___eq___hit(self): - time_partitioning = self._make_one( - field="foo", expiration_ms=100000, require_partition_filter=True - ) - other = self._make_one( - field="foo", expiration_ms=100000, require_partition_filter=True - ) + time_partitioning = self._make_one(field="foo", expiration_ms=100000) + other = self._make_one(field="foo", expiration_ms=100000) self.assertEqual(time_partitioning, other) def test___ne___wrong_type(self): @@ -2960,18 +3144,9 @@ def test___repr___explicit(self): from google.cloud.bigquery.table import TimePartitioningType time_partitioning = self._make_one( - type_=TimePartitioningType.DAY, - field="name", - expiration_ms=10000, - require_partition_filter=True, - ) - expected = ( - "TimePartitioning(" - "expirationMs=10000," - "field=name," - "requirePartitionFilter=True," - "type=DAY)" + type_=TimePartitioningType.DAY, field="name", expiration_ms=10000 ) + expected = "TimePartitioning(" "expirationMs=10000," "field=name," "type=DAY)" self.assertEqual(repr(time_partitioning), expected) def test_set_expiration_w_none(self): diff --git a/bigquery_datatransfer/docs/conf.py b/bigquery_datatransfer/docs/conf.py index ef6f120fefa6..9de00dd124ef 100644 --- a/bigquery_datatransfer/docs/conf.py +++ b/bigquery_datatransfer/docs/conf.py @@ -338,7 +338,7 @@ "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": 
("https://grpc.io/grpc/python/", None), - "requests": ("https://2.python-requests.org/en/master/", None), + "requests": ("https://requests.kennethreitz.org/en/stable/", None), "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } diff --git a/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/data_transfer_service_client.py b/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/data_transfer_service_client.py index 5c52eed8a43d..4466ec5b9c18 100644 --- a/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/data_transfer_service_client.py +++ b/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/data_transfer_service_client.py @@ -297,8 +297,8 @@ def get_data_source( >>> response = client.get_data_source(name) Args: - name (str): The field will contain name of the resource requested, for example: - ``projects/{project_id}/dataSources/{data_source_id}`` + name (str): Required. The field will contain name of the resource requested, for + example: ``projects/{project_id}/dataSources/{data_source_id}`` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. @@ -381,8 +381,8 @@ def list_data_sources( ... pass Args: - parent (str): The BigQuery project id for which data sources should be returned. Must - be in the form: ``projects/{project_id}`` + parent (str): Required. The BigQuery project id for which data sources should be + returned. Must be in the form: ``projects/{project_id}`` page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per- resource, this parameter does not affect the return value. 
If page @@ -478,12 +478,12 @@ def create_transfer_config( >>> response = client.create_transfer_config(parent, transfer_config) Args: - parent (str): The BigQuery project id where the transfer configuration should be - created. Must be in the format + parent (str): Required. The BigQuery project id where the transfer configuration + should be created. Must be in the format projects/{project\_id}/locations/{location\_id} If specified location and location of the destination bigquery dataset do not match - the request will fail. - transfer_config (Union[dict, ~google.cloud.bigquery_datatransfer_v1.types.TransferConfig]): Data transfer configuration to create. + transfer_config (Union[dict, ~google.cloud.bigquery_datatransfer_v1.types.TransferConfig]): Required. Data transfer configuration to create. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.bigquery_datatransfer_v1.types.TransferConfig` @@ -591,11 +591,11 @@ def update_transfer_config( >>> response = client.update_transfer_config(transfer_config, update_mask) Args: - transfer_config (Union[dict, ~google.cloud.bigquery_datatransfer_v1.types.TransferConfig]): Data transfer configuration to create. + transfer_config (Union[dict, ~google.cloud.bigquery_datatransfer_v1.types.TransferConfig]): Required. Data transfer configuration to create. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.bigquery_datatransfer_v1.types.TransferConfig` - update_mask (Union[dict, ~google.cloud.bigquery_datatransfer_v1.types.FieldMask]): Required list of fields to be updated in this request. + update_mask (Union[dict, ~google.cloud.bigquery_datatransfer_v1.types.FieldMask]): Required. Required list of fields to be updated in this request. 
If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.bigquery_datatransfer_v1.types.FieldMask` @@ -696,8 +696,8 @@ def delete_transfer_config( >>> client.delete_transfer_config(name) Args: - name (str): The field will contain name of the resource requested, for example: - ``projects/{project_id}/transferConfigs/{config_id}`` + name (str): Required. The field will contain name of the resource requested, for + example: ``projects/{project_id}/transferConfigs/{config_id}`` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. @@ -763,8 +763,8 @@ def get_transfer_config( >>> response = client.get_transfer_config(name) Args: - name (str): The field will contain name of the resource requested, for example: - ``projects/{project_id}/transferConfigs/{config_id}`` + name (str): Required. The field will contain name of the resource requested, for + example: ``projects/{project_id}/transferConfigs/{config_id}`` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. @@ -847,8 +847,8 @@ def list_transfer_configs( ... pass Args: - parent (str): The BigQuery project id for which data sources should be returned: - ``projects/{project_id}``. + parent (str): Required. The BigQuery project id for which data sources should be + returned: ``projects/{project_id}``. data_source_ids (list[str]): When specified, only configurations of requested data sources are returned. page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per- @@ -950,14 +950,14 @@ def schedule_transfer_runs( >>> response = client.schedule_transfer_runs(parent, start_time, end_time) Args: - parent (str): Transfer configuration name in the form: + parent (str): Required. 
Transfer configuration name in the form: ``projects/{project_id}/transferConfigs/{config_id}``. - start_time (Union[dict, ~google.cloud.bigquery_datatransfer_v1.types.Timestamp]): Start time of the range of transfer runs. For example, + start_time (Union[dict, ~google.cloud.bigquery_datatransfer_v1.types.Timestamp]): Required. Start time of the range of transfer runs. For example, ``"2017-05-25T00:00:00+00:00"``. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.bigquery_datatransfer_v1.types.Timestamp` - end_time (Union[dict, ~google.cloud.bigquery_datatransfer_v1.types.Timestamp]): End time of the range of transfer runs. For example, + end_time (Union[dict, ~google.cloud.bigquery_datatransfer_v1.types.Timestamp]): Required. End time of the range of transfer runs. For example, ``"2017-05-30T00:00:00+00:00"``. If a dict is provided, it must be of the same form as the protobuf @@ -1032,7 +1032,8 @@ def get_transfer_run( >>> response = client.get_transfer_run(name) Args: - name (str): The field will contain name of the resource requested, for example: + name (str): Required. The field will contain name of the resource requested, for + example: ``projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}`` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will @@ -1102,7 +1103,8 @@ def delete_transfer_run( >>> client.delete_transfer_run(name) Args: - name (str): The field will contain name of the resource requested, for example: + name (str): Required. The field will contain name of the resource requested, for + example: ``projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}`` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will @@ -1184,8 +1186,8 @@ def list_transfer_runs( ... 
pass Args: - parent (str): Name of transfer configuration for which transfer runs should be - retrieved. Format of transfer configuration resource name is: + parent (str): Required. Name of transfer configuration for which transfer runs should + be retrieved. Format of transfer configuration resource name is: ``projects/{project_id}/transferConfigs/{config_id}``. states (list[~google.cloud.bigquery_datatransfer_v1.types.TransferState]): When specified, only transfer runs with requested states are returned. page_size (int): The maximum number of resources contained in the @@ -1292,7 +1294,7 @@ def list_transfer_logs( ... pass Args: - parent (str): Transfer run name in the form: + parent (str): Required. Transfer run name in the form: ``projects/{project_id}/transferConfigs/{config_Id}/runs/{run_id}``. page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per- @@ -1390,7 +1392,7 @@ def check_valid_creds( >>> response = client.check_valid_creds(name) Args: - name (str): The data source in the form: + name (str): Required. The data source in the form: ``projects/{project_id}/dataSources/{data_source_id}`` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. 
If ``None`` is specified, requests will diff --git a/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datatransfer.proto b/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datatransfer.proto index e9a39683494c..a464645b5ac3 100644 --- a/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datatransfer.proto +++ b/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datatransfer.proto @@ -18,13 +18,15 @@ syntax = "proto3"; package google.cloud.bigquery.datatransfer.v1; import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; import "google/cloud/bigquery/datatransfer/v1/transfer.proto"; import "google/protobuf/duration.proto"; import "google/protobuf/empty.proto"; import "google/protobuf/field_mask.proto"; import "google/protobuf/timestamp.proto"; import "google/protobuf/wrappers.proto"; -import "google/api/client.proto"; option csharp_namespace = "Google.Cloud.BigQuery.DataTransfer.V1"; option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1;datatransfer"; @@ -50,6 +52,7 @@ service DataTransferService { get: "/v1/{name=projects/*/dataSources/*}" } }; + option (google.api.method_signature) = "name"; } // Lists supported data sources and returns their settings, @@ -61,6 +64,7 @@ service DataTransferService { get: "/v1/{parent=projects/*}/dataSources" } }; + option (google.api.method_signature) = "parent"; } // Creates a new data transfer configuration. @@ -73,6 +77,7 @@ service DataTransferService { body: "transfer_config" } }; + option (google.api.method_signature) = "parent,transfer_config"; } // Updates a data transfer configuration. 
@@ -86,6 +91,7 @@ service DataTransferService { body: "transfer_config" } }; + option (google.api.method_signature) = "transfer_config,update_mask"; } // Deletes a data transfer configuration, @@ -97,6 +103,7 @@ service DataTransferService { delete: "/v1/{name=projects/*/transferConfigs/*}" } }; + option (google.api.method_signature) = "name"; } // Returns information about a data transfer config. @@ -107,6 +114,7 @@ service DataTransferService { get: "/v1/{name=projects/*/transferConfigs/*}" } }; + option (google.api.method_signature) = "name"; } // Returns information about all data transfers in the project. @@ -117,6 +125,7 @@ service DataTransferService { get: "/v1/{parent=projects/*}/transferConfigs" } }; + option (google.api.method_signature) = "parent"; } // Creates transfer runs for a time range [start_time, end_time]. @@ -134,6 +143,7 @@ service DataTransferService { body: "*" } }; + option (google.api.method_signature) = "parent,start_time,end_time"; } // Start manual transfer runs to be executed now with schedule_time equal to @@ -159,6 +169,7 @@ service DataTransferService { get: "/v1/{name=projects/*/transferConfigs/*/runs/*}" } }; + option (google.api.method_signature) = "name"; } // Deletes the specified transfer run. @@ -169,6 +180,7 @@ service DataTransferService { delete: "/v1/{name=projects/*/transferConfigs/*/runs/*}" } }; + option (google.api.method_signature) = "name"; } // Returns information about running and completed jobs. @@ -179,6 +191,7 @@ service DataTransferService { get: "/v1/{parent=projects/*/transferConfigs/*}/runs" } }; + option (google.api.method_signature) = "parent"; } // Returns user facing log messages for the data transfer run. 
@@ -189,6 +202,7 @@ service DataTransferService { get: "/v1/{parent=projects/*/transferConfigs/*/runs/*}/transferLogs" } }; + option (google.api.method_signature) = "parent"; } // Returns true if valid credentials exist for the given data source and @@ -206,6 +220,7 @@ service DataTransferService { body: "*" } }; + option (google.api.method_signature) = "name"; } } @@ -295,6 +310,11 @@ message DataSourceParameter { // Represents data source metadata. Metadata is sufficient to // render UI and request proper OAuth tokens. message DataSource { + option (google.api.resource) = { + type: "bigquerydatatransfer.googleapis.com/DataSource" + pattern: "projects/{project}/dataSources/{data_source}" + }; + // The type of authorization needed for this data source. enum AuthorizationType { // Type unspecified. @@ -326,7 +346,7 @@ message DataSource { } // Output only. Data source resource name. - string name = 1; + string name = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; // Data source id. string data_source_id = 2; @@ -396,16 +416,26 @@ message DataSource { // A request to get data source info. message GetDataSourceRequest { - // The field will contain name of the resource requested, for example: + // Required. The field will contain name of the resource requested, for example: // `projects/{project_id}/dataSources/{data_source_id}` - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerydatatransfer.googleapis.com/DataSource" + } + ]; } // Request to list supported data sources and their data transfer settings. message ListDataSourcesRequest { - // The BigQuery project id for which data sources should be returned. + // Required. The BigQuery project id for which data sources should be returned. 
// Must be in the form: `projects/{project_id}` - string parent = 1; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "cloudresourcemanager.googleapis.com/Project" + } + ]; // Pagination token, which can be used to request a specific page // of `ListDataSourcesRequest` list results. For multiple-page @@ -427,7 +457,7 @@ message ListDataSourcesResponse { // this token can be used as the // `ListDataSourcesRequest.page_token` // to request the next page of list results. - string next_page_token = 2; + string next_page_token = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; } // A request to create a data transfer configuration. If new credentials are @@ -437,14 +467,19 @@ message ListDataSourcesResponse { // authorization code. Otherwise, the transfer configuration will be associated // with the calling user. message CreateTransferConfigRequest { - // The BigQuery project id where the transfer configuration should be created. + // Required. The BigQuery project id where the transfer configuration should be created. // Must be in the format projects/{project_id}/locations/{location_id} // If specified location and location of the destination bigquery dataset // do not match - the request will fail. - string parent = 1; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; - // Data transfer configuration to create. - TransferConfig transfer_config = 2; + // Required. Data transfer configuration to create. + TransferConfig transfer_config = 2 [(google.api.field_behavior) = REQUIRED]; // Optional OAuth2 authorization code to use with this transfer configuration. // This is required if new credentials are needed, as indicated by @@ -476,8 +511,8 @@ message CreateTransferConfigRequest { // A request to update a transfer configuration. 
To update the user id of the // transfer configuration, an authorization code needs to be provided. message UpdateTransferConfigRequest { - // Data transfer configuration to create. - TransferConfig transfer_config = 1; + // Required. Data transfer configuration to create. + TransferConfig transfer_config = 1 [(google.api.field_behavior) = REQUIRED]; // Optional OAuth2 authorization code to use with this transfer configuration. // If it is provided, the transfer configuration will be associated with the @@ -497,8 +532,8 @@ message UpdateTransferConfigRequest { // the user to copy the code and paste it in the application. string authorization_code = 3; - // Required list of fields to be updated in this request. - google.protobuf.FieldMask update_mask = 4; + // Required. Required list of fields to be updated in this request. + google.protobuf.FieldMask update_mask = 4 [(google.api.field_behavior) = REQUIRED]; // Optional version info. If users want to find a very recent access token, // that is, immediately after approving access, users have to set the @@ -511,38 +546,63 @@ message UpdateTransferConfigRequest { // A request to get data transfer information. message GetTransferConfigRequest { - // The field will contain name of the resource requested, for example: + // Required. The field will contain name of the resource requested, for example: // `projects/{project_id}/transferConfigs/{config_id}` - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerydatatransfer.googleapis.com/TransferConfig" + } + ]; } // A request to delete data transfer information. All associated transfer runs // and log messages will be deleted as well. message DeleteTransferConfigRequest { - // The field will contain name of the resource requested, for example: + // Required. 
The field will contain name of the resource requested, for example: // `projects/{project_id}/transferConfigs/{config_id}` - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerydatatransfer.googleapis.com/TransferConfig" + } + ]; } // A request to get data transfer run information. message GetTransferRunRequest { - // The field will contain name of the resource requested, for example: + // Required. The field will contain name of the resource requested, for example: // `projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}` - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerydatatransfer.googleapis.com/TransferRun" + } + ]; } // A request to delete data transfer run information. message DeleteTransferRunRequest { - // The field will contain name of the resource requested, for example: + // Required. The field will contain name of the resource requested, for example: // `projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}` - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerydatatransfer.googleapis.com/TransferRun" + } + ]; } // A request to list data transfers configured for a BigQuery project. message ListTransferConfigsRequest { - // The BigQuery project id for which data sources + // Required. The BigQuery project id for which data sources // should be returned: `projects/{project_id}`. - string parent = 1; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "cloudresourcemanager.googleapis.com/Project" + } + ]; // When specified, only configurations of requested data sources are returned. repeated string data_source_ids = 2; @@ -561,13 +621,13 @@ message ListTransferConfigsRequest { // The returned list of pipelines in the project. 
message ListTransferConfigsResponse { // Output only. The stored pipeline transfer configurations. - repeated TransferConfig transfer_configs = 1; + repeated TransferConfig transfer_configs = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The next-pagination token. For multiple-page list results, // this token can be used as the // `ListTransferConfigsRequest.page_token` // to request the next page of list results. - string next_page_token = 2; + string next_page_token = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; } // A request to list data transfer runs. UI can use this method to show/filter @@ -583,10 +643,15 @@ message ListTransferRunsRequest { LATEST = 1; } - // Name of transfer configuration for which transfer runs should be retrieved. + // Required. Name of transfer configuration for which transfer runs should be retrieved. // Format of transfer configuration resource name is: // `projects/{project_id}/transferConfigs/{config_id}`. - string parent = 1; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerydatatransfer.googleapis.com/TransferConfig" + } + ]; // When specified, only transfer runs with requested states are returned. repeated TransferState states = 2; @@ -608,20 +673,25 @@ message ListTransferRunsRequest { // The returned list of pipelines in the project. message ListTransferRunsResponse { // Output only. The stored pipeline transfer runs. - repeated TransferRun transfer_runs = 1; + repeated TransferRun transfer_runs = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The next-pagination token. For multiple-page list results, // this token can be used as the // `ListTransferRunsRequest.page_token` // to request the next page of list results. - string next_page_token = 2; + string next_page_token = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; } // A request to get user facing log messages associated with data transfer run. 
message ListTransferLogsRequest { - // Transfer run name in the form: + // Required. Transfer run name in the form: // `projects/{project_id}/transferConfigs/{config_Id}/runs/{run_id}`. - string parent = 1; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerydatatransfer.googleapis.com/TransferRun" + } + ]; // Pagination token, which can be used to request a specific page // of `ListTransferLogsRequest` list results. For multiple-page @@ -641,13 +711,13 @@ message ListTransferLogsRequest { // The returned list transfer run messages. message ListTransferLogsResponse { // Output only. The stored pipeline transfer messages. - repeated TransferMessage transfer_messages = 1; + repeated TransferMessage transfer_messages = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The next-pagination token. For multiple-page list results, // this token can be used as the // `GetTransferRunLogRequest.page_token` // to request the next page of list results. - string next_page_token = 2; + string next_page_token = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; } // A request to determine whether the user has valid credentials. This method @@ -657,9 +727,14 @@ message ListTransferLogsResponse { // returns false, as it cannot be determined whether the credentials are // already valid merely based on the user id. message CheckValidCredsRequest { - // The data source in the form: + // Required. The data source in the form: // `projects/{project_id}/dataSources/{data_source_id}` - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerydatatransfer.googleapis.com/DataSource" + } + ]; } // A response indicating whether the credentials exist and are valid. @@ -670,17 +745,22 @@ message CheckValidCredsResponse { // A request to schedule transfer runs for a time range. 
message ScheduleTransferRunsRequest { - // Transfer configuration name in the form: + // Required. Transfer configuration name in the form: // `projects/{project_id}/transferConfigs/{config_id}`. - string parent = 1; - - // Start time of the range of transfer runs. For example, + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerydatatransfer.googleapis.com/TransferConfig" + } + ]; + + // Required. Start time of the range of transfer runs. For example, // `"2017-05-25T00:00:00+00:00"`. - google.protobuf.Timestamp start_time = 2; + google.protobuf.Timestamp start_time = 2 [(google.api.field_behavior) = REQUIRED]; - // End time of the range of transfer runs. For example, + // Required. End time of the range of transfer runs. For example, // `"2017-05-30T00:00:00+00:00"`. - google.protobuf.Timestamp end_time = 3; + google.protobuf.Timestamp end_time = 3 [(google.api.field_behavior) = REQUIRED]; } // A response to schedule transfer runs for a time range. @@ -709,7 +789,9 @@ message StartManualTransferRunsRequest { // Transfer configuration name in the form: // `projects/{project_id}/transferConfigs/{config_id}`. - string parent = 1; + string parent = 1 [(google.api.resource_reference) = { + type: "bigquerydatatransfer.googleapis.com/TransferConfig" + }]; // The requested time specification - this can be a time range or a specific // run_time. 
diff --git a/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datatransfer_pb2.py b/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datatransfer_pb2.py index 55338100718e..a70a55e3df79 100644 --- a/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datatransfer_pb2.py +++ b/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datatransfer_pb2.py @@ -16,6 +16,9 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.cloud.bigquery_datatransfer_v1.proto import ( transfer_pb2 as google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2, ) @@ -24,7 +27,6 @@ from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 -from google.api import client_pb2 as google_dot_api_dot_client__pb2 DESCRIPTOR = _descriptor.FileDescriptor( @@ -35,17 +37,19 @@ "\n)com.google.cloud.bigquery.datatransfer.v1B\021DataTransferProtoP\001ZQgoogle.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1;datatransfer\252\002%Google.Cloud.BigQuery.DataTransfer.V1\312\002%Google\\Cloud\\BigQuery\\DataTransfer\\V1" ), serialized_pb=_b( - '\n>google/cloud/bigquery/datatransfer_v1/proto/datatransfer.proto\x12%google.cloud.bigquery.datatransfer.v1\x1a\x1cgoogle/api/annotations.proto\x1a:google/cloud/bigquery/datatransfer_v1/proto/transfer.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a 
google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x17google/api/client.proto"\x85\x05\n\x13\x44\x61taSourceParameter\x12\x10\n\x08param_id\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12M\n\x04type\x18\x04 \x01(\x0e\x32?.google.cloud.bigquery.datatransfer.v1.DataSourceParameter.Type\x12\x10\n\x08required\x18\x05 \x01(\x08\x12\x10\n\x08repeated\x18\x06 \x01(\x08\x12\x18\n\x10validation_regex\x18\x07 \x01(\t\x12\x16\n\x0e\x61llowed_values\x18\x08 \x03(\t\x12/\n\tmin_value\x18\t \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12/\n\tmax_value\x18\n \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12J\n\x06\x66ields\x18\x0b \x03(\x0b\x32:.google.cloud.bigquery.datatransfer.v1.DataSourceParameter\x12\x1e\n\x16validation_description\x18\x0c \x01(\t\x12\x1b\n\x13validation_help_url\x18\r \x01(\t\x12\x11\n\timmutable\x18\x0e \x01(\x08\x12\x0f\n\x07recurse\x18\x0f \x01(\x08\x12\x12\n\ndeprecated\x18\x14 \x01(\x08"i\n\x04Type\x12\x14\n\x10TYPE_UNSPECIFIED\x10\x00\x12\n\n\x06STRING\x10\x01\x12\x0b\n\x07INTEGER\x10\x02\x12\n\n\x06\x44OUBLE\x10\x03\x12\x0b\n\x07\x42OOLEAN\x10\x04\x12\n\n\x06RECORD\x10\x05\x12\r\n\tPLUS_PAGE\x10\x06"\xd7\x07\n\nDataSource\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x16\n\x0e\x64\x61ta_source_id\x18\x02 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x03 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x04 \x01(\t\x12\x11\n\tclient_id\x18\x05 \x01(\t\x12\x0e\n\x06scopes\x18\x06 \x03(\t\x12N\n\rtransfer_type\x18\x07 \x01(\x0e\x32\x33.google.cloud.bigquery.datatransfer.v1.TransferTypeB\x02\x18\x01\x12\'\n\x1bsupports_multiple_transfers\x18\x08 \x01(\x08\x42\x02\x18\x01\x12\x1f\n\x17update_deadline_seconds\x18\t \x01(\x05\x12\x18\n\x10\x64\x65\x66\x61ult_schedule\x18\n \x01(\t\x12 \n\x18supports_custom_schedule\x18\x0b \x01(\x08\x12N\n\nparameters\x18\x0c 
\x03(\x0b\x32:.google.cloud.bigquery.datatransfer.v1.DataSourceParameter\x12\x10\n\x08help_url\x18\r \x01(\t\x12_\n\x12\x61uthorization_type\x18\x0e \x01(\x0e\x32\x43.google.cloud.bigquery.datatransfer.v1.DataSource.AuthorizationType\x12\\\n\x11\x64\x61ta_refresh_type\x18\x0f \x01(\x0e\x32\x41.google.cloud.bigquery.datatransfer.v1.DataSource.DataRefreshType\x12(\n default_data_refresh_window_days\x18\x10 \x01(\x05\x12\x1c\n\x14manual_runs_disabled\x18\x11 \x01(\x08\x12<\n\x19minimum_schedule_interval\x18\x12 \x01(\x0b\x32\x19.google.protobuf.Duration"s\n\x11\x41uthorizationType\x12"\n\x1e\x41UTHORIZATION_TYPE_UNSPECIFIED\x10\x00\x12\x16\n\x12\x41UTHORIZATION_CODE\x10\x01\x12"\n\x1eGOOGLE_PLUS_AUTHORIZATION_CODE\x10\x02"c\n\x0f\x44\x61taRefreshType\x12!\n\x1d\x44\x41TA_REFRESH_TYPE_UNSPECIFIED\x10\x00\x12\x12\n\x0eSLIDING_WINDOW\x10\x01\x12\x19\n\x15\x43USTOM_SLIDING_WINDOW\x10\x02"$\n\x14GetDataSourceRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"O\n\x16ListDataSourcesRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05"{\n\x17ListDataSourcesResponse\x12G\n\x0c\x64\x61ta_sources\x18\x01 \x03(\x0b\x32\x31.google.cloud.bigquery.datatransfer.v1.DataSource\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\xaf\x01\n\x1b\x43reateTransferConfigRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12N\n\x0ftransfer_config\x18\x02 \x01(\x0b\x32\x35.google.cloud.bigquery.datatransfer.v1.TransferConfig\x12\x1a\n\x12\x61uthorization_code\x18\x03 \x01(\t\x12\x14\n\x0cversion_info\x18\x05 \x01(\t"\xd0\x01\n\x1bUpdateTransferConfigRequest\x12N\n\x0ftransfer_config\x18\x01 \x01(\x0b\x32\x35.google.cloud.bigquery.datatransfer.v1.TransferConfig\x12\x1a\n\x12\x61uthorization_code\x18\x03 \x01(\t\x12/\n\x0bupdate_mask\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\x12\x14\n\x0cversion_info\x18\x05 \x01(\t"(\n\x18GetTransferConfigRequest\x12\x0c\n\x04name\x18\x01 
\x01(\t"+\n\x1b\x44\x65leteTransferConfigRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"%\n\x15GetTransferRunRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"(\n\x18\x44\x65leteTransferRunRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"l\n\x1aListTransferConfigsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x17\n\x0f\x64\x61ta_source_ids\x18\x02 \x03(\t\x12\x12\n\npage_token\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05"\x87\x01\n\x1bListTransferConfigsResponse\x12O\n\x10transfer_configs\x18\x01 \x03(\x0b\x32\x35.google.cloud.bigquery.datatransfer.v1.TransferConfig\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\xad\x02\n\x17ListTransferRunsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x44\n\x06states\x18\x02 \x03(\x0e\x32\x34.google.cloud.bigquery.datatransfer.v1.TransferState\x12\x12\n\npage_token\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05\x12^\n\x0brun_attempt\x18\x05 \x01(\x0e\x32I.google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest.RunAttempt"5\n\nRunAttempt\x12\x1b\n\x17RUN_ATTEMPT_UNSPECIFIED\x10\x00\x12\n\n\x06LATEST\x10\x01"~\n\x18ListTransferRunsResponse\x12I\n\rtransfer_runs\x18\x01 \x03(\x0b\x32\x32.google.cloud.bigquery.datatransfer.v1.TransferRun\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\xaf\x01\n\x17ListTransferLogsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x04 \x01(\t\x12\x11\n\tpage_size\x18\x05 \x01(\x05\x12]\n\rmessage_types\x18\x06 \x03(\x0e\x32\x46.google.cloud.bigquery.datatransfer.v1.TransferMessage.MessageSeverity"\x86\x01\n\x18ListTransferLogsResponse\x12Q\n\x11transfer_messages\x18\x01 \x03(\x0b\x32\x36.google.cloud.bigquery.datatransfer.v1.TransferMessage\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"&\n\x16\x43heckValidCredsRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"2\n\x17\x43heckValidCredsResponse\x12\x17\n\x0fhas_valid_creds\x18\x01 \x01(\x08"\x8b\x01\n\x1bScheduleTransferRunsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12.\n\nstart_time\x18\x02 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"`\n\x1cScheduleTransferRunsResponse\x12@\n\x04runs\x18\x01 \x03(\x0b\x32\x32.google.cloud.bigquery.datatransfer.v1.TransferRun"\xce\x02\n\x1eStartManualTransferRunsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12o\n\x14requested_time_range\x18\x03 \x01(\x0b\x32O.google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest.TimeRangeH\x00\x12\x38\n\x12requested_run_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x1ai\n\tTimeRange\x12.\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x06\n\x04time"c\n\x1fStartManualTransferRunsResponse\x12@\n\x04runs\x18\x01 \x03(\x0b\x32\x32.google.cloud.bigquery.datatransfer.v1.TransferRun2\xa1\x1e\n\x13\x44\x61taTransferService\x12\xdf\x01\n\rGetDataSource\x12;.google.cloud.bigquery.datatransfer.v1.GetDataSourceRequest\x1a\x31.google.cloud.bigquery.datatransfer.v1.DataSource"^\x82\xd3\xe4\x93\x02X\x12//v1/{name=projects/*/locations/*/dataSources/*}Z%\x12#/v1/{name=projects/*/dataSources/*}\x12\xf0\x01\n\x0fListDataSources\x12=.google.cloud.bigquery.datatransfer.v1.ListDataSourcesRequest\x1a>.google.cloud.bigquery.datatransfer.v1.ListDataSourcesResponse"^\x82\xd3\xe4\x93\x02X\x12//v1/{parent=projects/*/locations/*}/dataSourcesZ%\x12#/v1/{parent=projects/*}/dataSources\x12\x9d\x02\n\x14\x43reateTransferConfig\x12\x42.google.cloud.bigquery.datatransfer.v1.CreateTransferConfigRequest\x1a\x35.google.cloud.bigquery.datatransfer.v1.TransferConfig"\x89\x01\x82\xd3\xe4\x93\x02\x82\x01"3/v1/{parent=projects/*/locations/*}/transferConfigs:\x0ftransfer_configZ:"\'/v1/{parent=projects/*}/transferConfigs:\x0ftransfer_config\x12\xbd\x02\n\x14UpdateTransferConfig\x12\x42.google.cloud.bigquery.datatransfer.v1.UpdateTransferConfigRequest\x1a\x35.google.cloud.bigquery.datatransfer.v1.TransferConfig"\xa9\x01\x82\xd3\xe4\x93\x
02\xa2\x01\x32\x43/v1/{transfer_config.name=projects/*/locations/*/transferConfigs/*}:\x0ftransfer_configZJ27/v1/{transfer_config.name=projects/*/transferConfigs/*}:\x0ftransfer_config\x12\xda\x01\n\x14\x44\x65leteTransferConfig\x12\x42.google.cloud.bigquery.datatransfer.v1.DeleteTransferConfigRequest\x1a\x16.google.protobuf.Empty"f\x82\xd3\xe4\x93\x02`*3/v1/{name=projects/*/locations/*/transferConfigs/*}Z)*\'/v1/{name=projects/*/transferConfigs/*}\x12\xf3\x01\n\x11GetTransferConfig\x12?.google.cloud.bigquery.datatransfer.v1.GetTransferConfigRequest\x1a\x35.google.cloud.bigquery.datatransfer.v1.TransferConfig"f\x82\xd3\xe4\x93\x02`\x12\x33/v1/{name=projects/*/locations/*/transferConfigs/*}Z)\x12\'/v1/{name=projects/*/transferConfigs/*}\x12\x84\x02\n\x13ListTransferConfigs\x12\x41.google.cloud.bigquery.datatransfer.v1.ListTransferConfigsRequest\x1a\x42.google.cloud.bigquery.datatransfer.v1.ListTransferConfigsResponse"f\x82\xd3\xe4\x93\x02`\x12\x33/v1/{parent=projects/*/locations/*}/transferConfigsZ)\x12\'/v1/{parent=projects/*}/transferConfigs\x12\xb0\x02\n\x14ScheduleTransferRuns\x12\x42.google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsRequest\x1a\x43.google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsResponse"\x8e\x01\x88\x02\x01\x82\xd3\xe4\x93\x02\x84\x01"B/v1/{parent=projects/*/locations/*/transferConfigs/*}:scheduleRuns:\x01*Z;"6/v1/{parent=projects/*/transferConfigs/*}:scheduleRuns:\x01*\x12\xbc\x02\n\x17StartManualTransferRuns\x12\x45.google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest\x1a\x46.google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsResponse"\x91\x01\x82\xd3\xe4\x93\x02\x8a\x01"E/v1/{parent=projects/*/locations/*/transferConfigs/*}:startManualRuns:\x01*Z>"9/v1/{parent=projects/*/transferConfigs/*}:startManualRuns:\x01*\x12\xf8\x01\n\x0eGetTransferRun\x12<.google.cloud.bigquery.datatransfer.v1.GetTransferRunRequest\x1a\x32.google.cloud.bigquery.datatransfer.v1.TransferRun"t\x82\xd3\xe4\x93\x02n\x12:/v1/{nam
e=projects/*/locations/*/transferConfigs/*/runs/*}Z0\x12./v1/{name=projects/*/transferConfigs/*/runs/*}\x12\xe2\x01\n\x11\x44\x65leteTransferRun\x12?.google.cloud.bigquery.datatransfer.v1.DeleteTransferRunRequest\x1a\x16.google.protobuf.Empty"t\x82\xd3\xe4\x93\x02n*:/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}Z0*./v1/{name=projects/*/transferConfigs/*/runs/*}\x12\x89\x02\n\x10ListTransferRuns\x12>.google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest\x1a?.google.cloud.bigquery.datatransfer.v1.ListTransferRunsResponse"t\x82\xd3\xe4\x93\x02n\x12:/v1/{parent=projects/*/locations/*/transferConfigs/*}/runsZ0\x12./v1/{parent=projects/*/transferConfigs/*}/runs\x12\xa9\x02\n\x10ListTransferLogs\x12>.google.cloud.bigquery.datatransfer.v1.ListTransferLogsRequest\x1a?.google.cloud.bigquery.datatransfer.v1.ListTransferLogsResponse"\x93\x01\x82\xd3\xe4\x93\x02\x8c\x01\x12I/v1/{parent=projects/*/locations/*/transferConfigs/*/runs/*}/transferLogsZ?\x12=/v1/{parent=projects/*/transferConfigs/*/runs/*}/transferLogs\x12\x97\x02\n\x0f\x43heckValidCreds\x12=.google.cloud.bigquery.datatransfer.v1.CheckValidCredsRequest\x1a>.google.cloud.bigquery.datatransfer.v1.CheckValidCredsResponse"\x84\x01\x82\xd3\xe4\x93\x02~"?/v1/{name=projects/*/locations/*/dataSources/*}:checkValidCreds:\x01*Z8"3/v1/{name=projects/*/dataSources/*}:checkValidCreds:\x01*\x1aW\xca\x41#bigquerydatatransfer.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB\xe3\x01\n)com.google.cloud.bigquery.datatransfer.v1B\x11\x44\x61taTransferProtoP\x01ZQgoogle.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1;datatransfer\xaa\x02%Google.Cloud.BigQuery.DataTransfer.V1\xca\x02%Google\\Cloud\\BigQuery\\DataTransfer\\V1b\x06proto3' + 
'\n>google/cloud/bigquery/datatransfer_v1/proto/datatransfer.proto\x12%google.cloud.bigquery.datatransfer.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a:google/cloud/bigquery/datatransfer_v1/proto/transfer.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto"\x85\x05\n\x13\x44\x61taSourceParameter\x12\x10\n\x08param_id\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12M\n\x04type\x18\x04 \x01(\x0e\x32?.google.cloud.bigquery.datatransfer.v1.DataSourceParameter.Type\x12\x10\n\x08required\x18\x05 \x01(\x08\x12\x10\n\x08repeated\x18\x06 \x01(\x08\x12\x18\n\x10validation_regex\x18\x07 \x01(\t\x12\x16\n\x0e\x61llowed_values\x18\x08 \x03(\t\x12/\n\tmin_value\x18\t \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12/\n\tmax_value\x18\n \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12J\n\x06\x66ields\x18\x0b \x03(\x0b\x32:.google.cloud.bigquery.datatransfer.v1.DataSourceParameter\x12\x1e\n\x16validation_description\x18\x0c \x01(\t\x12\x1b\n\x13validation_help_url\x18\r \x01(\t\x12\x11\n\timmutable\x18\x0e \x01(\x08\x12\x0f\n\x07recurse\x18\x0f \x01(\x08\x12\x12\n\ndeprecated\x18\x14 \x01(\x08"i\n\x04Type\x12\x14\n\x10TYPE_UNSPECIFIED\x10\x00\x12\n\n\x06STRING\x10\x01\x12\x0b\n\x07INTEGER\x10\x02\x12\n\n\x06\x44OUBLE\x10\x03\x12\x0b\n\x07\x42OOLEAN\x10\x04\x12\n\n\x06RECORD\x10\x05\x12\r\n\tPLUS_PAGE\x10\x06"\xbf\x08\n\nDataSource\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12\x16\n\x0e\x64\x61ta_source_id\x18\x02 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x03 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x04 \x01(\t\x12\x11\n\tclient_id\x18\x05 \x01(\t\x12\x0e\n\x06scopes\x18\x06 \x03(\t\x12N\n\rtransfer_type\x18\x07 
\x01(\x0e\x32\x33.google.cloud.bigquery.datatransfer.v1.TransferTypeB\x02\x18\x01\x12\'\n\x1bsupports_multiple_transfers\x18\x08 \x01(\x08\x42\x02\x18\x01\x12\x1f\n\x17update_deadline_seconds\x18\t \x01(\x05\x12\x18\n\x10\x64\x65\x66\x61ult_schedule\x18\n \x01(\t\x12 \n\x18supports_custom_schedule\x18\x0b \x01(\x08\x12N\n\nparameters\x18\x0c \x03(\x0b\x32:.google.cloud.bigquery.datatransfer.v1.DataSourceParameter\x12\x10\n\x08help_url\x18\r \x01(\t\x12_\n\x12\x61uthorization_type\x18\x0e \x01(\x0e\x32\x43.google.cloud.bigquery.datatransfer.v1.DataSource.AuthorizationType\x12\\\n\x11\x64\x61ta_refresh_type\x18\x0f \x01(\x0e\x32\x41.google.cloud.bigquery.datatransfer.v1.DataSource.DataRefreshType\x12(\n default_data_refresh_window_days\x18\x10 \x01(\x05\x12\x1c\n\x14manual_runs_disabled\x18\x11 \x01(\x08\x12<\n\x19minimum_schedule_interval\x18\x12 \x01(\x0b\x32\x19.google.protobuf.Duration"s\n\x11\x41uthorizationType\x12"\n\x1e\x41UTHORIZATION_TYPE_UNSPECIFIED\x10\x00\x12\x16\n\x12\x41UTHORIZATION_CODE\x10\x01\x12"\n\x1eGOOGLE_PLUS_AUTHORIZATION_CODE\x10\x02"c\n\x0f\x44\x61taRefreshType\x12!\n\x1d\x44\x41TA_REFRESH_TYPE_UNSPECIFIED\x10\x00\x12\x12\n\x0eSLIDING_WINDOW\x10\x01\x12\x19\n\x15\x43USTOM_SLIDING_WINDOW\x10\x02:a\xea\x41^\n.bigquerydatatransfer.googleapis.com/DataSource\x12,projects/{project}/dataSources/{data_source}"\\\n\x14GetDataSourceRequest\x12\x44\n\x04name\x18\x01 \x01(\tB6\xe0\x41\x02\xfa\x41\x30\n.bigquerydatatransfer.googleapis.com/DataSource"\x84\x01\n\x16ListDataSourcesRequest\x12\x43\n\x06parent\x18\x01 \x01(\tB3\xe0\x41\x02\xfa\x41-\n+cloudresourcemanager.googleapis.com/Project\x12\x12\n\npage_token\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05"\x80\x01\n\x17ListDataSourcesResponse\x12G\n\x0c\x64\x61ta_sources\x18\x01 \x03(\x0b\x32\x31.google.cloud.bigquery.datatransfer.v1.DataSource\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03"\xdf\x01\n\x1b\x43reateTransferConfigRequest\x12\x39\n\x06parent\x18\x01 
\x01(\tB)\xe0\x41\x02\xfa\x41#\n!locations.googleapis.com/Location\x12S\n\x0ftransfer_config\x18\x02 \x01(\x0b\x32\x35.google.cloud.bigquery.datatransfer.v1.TransferConfigB\x03\xe0\x41\x02\x12\x1a\n\x12\x61uthorization_code\x18\x03 \x01(\t\x12\x14\n\x0cversion_info\x18\x05 \x01(\t"\xda\x01\n\x1bUpdateTransferConfigRequest\x12S\n\x0ftransfer_config\x18\x01 \x01(\x0b\x32\x35.google.cloud.bigquery.datatransfer.v1.TransferConfigB\x03\xe0\x41\x02\x12\x1a\n\x12\x61uthorization_code\x18\x03 \x01(\t\x12\x34\n\x0bupdate_mask\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02\x12\x14\n\x0cversion_info\x18\x05 \x01(\t"d\n\x18GetTransferConfigRequest\x12H\n\x04name\x18\x01 \x01(\tB:\xe0\x41\x02\xfa\x41\x34\n2bigquerydatatransfer.googleapis.com/TransferConfig"g\n\x1b\x44\x65leteTransferConfigRequest\x12H\n\x04name\x18\x01 \x01(\tB:\xe0\x41\x02\xfa\x41\x34\n2bigquerydatatransfer.googleapis.com/TransferConfig"^\n\x15GetTransferRunRequest\x12\x45\n\x04name\x18\x01 \x01(\tB7\xe0\x41\x02\xfa\x41\x31\n/bigquerydatatransfer.googleapis.com/TransferRun"a\n\x18\x44\x65leteTransferRunRequest\x12\x45\n\x04name\x18\x01 \x01(\tB7\xe0\x41\x02\xfa\x41\x31\n/bigquerydatatransfer.googleapis.com/TransferRun"\xa1\x01\n\x1aListTransferConfigsRequest\x12\x43\n\x06parent\x18\x01 \x01(\tB3\xe0\x41\x02\xfa\x41-\n+cloudresourcemanager.googleapis.com/Project\x12\x17\n\x0f\x64\x61ta_source_ids\x18\x02 \x03(\t\x12\x12\n\npage_token\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05"\x91\x01\n\x1bListTransferConfigsResponse\x12T\n\x10transfer_configs\x18\x01 \x03(\x0b\x32\x35.google.cloud.bigquery.datatransfer.v1.TransferConfigB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03"\xe9\x02\n\x17ListTransferRunsRequest\x12J\n\x06parent\x18\x01 \x01(\tB:\xe0\x41\x02\xfa\x41\x34\n2bigquerydatatransfer.googleapis.com/TransferConfig\x12\x44\n\x06states\x18\x02 \x03(\x0e\x32\x34.google.cloud.bigquery.datatransfer.v1.TransferState\x12\x12\n\npage_token\x18\x03 
\x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05\x12^\n\x0brun_attempt\x18\x05 \x01(\x0e\x32I.google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest.RunAttempt"5\n\nRunAttempt\x12\x1b\n\x17RUN_ATTEMPT_UNSPECIFIED\x10\x00\x12\n\n\x06LATEST\x10\x01"\x88\x01\n\x18ListTransferRunsResponse\x12N\n\rtransfer_runs\x18\x01 \x03(\x0b\x32\x32.google.cloud.bigquery.datatransfer.v1.TransferRunB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03"\xe8\x01\n\x17ListTransferLogsRequest\x12G\n\x06parent\x18\x01 \x01(\tB7\xe0\x41\x02\xfa\x41\x31\n/bigquerydatatransfer.googleapis.com/TransferRun\x12\x12\n\npage_token\x18\x04 \x01(\t\x12\x11\n\tpage_size\x18\x05 \x01(\x05\x12]\n\rmessage_types\x18\x06 \x03(\x0e\x32\x46.google.cloud.bigquery.datatransfer.v1.TransferMessage.MessageSeverity"\x90\x01\n\x18ListTransferLogsResponse\x12V\n\x11transfer_messages\x18\x01 \x03(\x0b\x32\x36.google.cloud.bigquery.datatransfer.v1.TransferMessageB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03"^\n\x16\x43heckValidCredsRequest\x12\x44\n\x04name\x18\x01 \x01(\tB6\xe0\x41\x02\xfa\x41\x30\n.bigquerydatatransfer.googleapis.com/DataSource"2\n\x17\x43heckValidCredsResponse\x12\x17\n\x0fhas_valid_creds\x18\x01 \x01(\x08"\xd1\x01\n\x1bScheduleTransferRunsRequest\x12J\n\x06parent\x18\x01 \x01(\tB:\xe0\x41\x02\xfa\x41\x34\n2bigquerydatatransfer.googleapis.com/TransferConfig\x12\x33\n\nstart_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x02\x12\x31\n\x08\x65nd_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x02"`\n\x1cScheduleTransferRunsResponse\x12@\n\x04runs\x18\x01 \x03(\x0b\x32\x32.google.cloud.bigquery.datatransfer.v1.TransferRun"\x87\x03\n\x1eStartManualTransferRunsRequest\x12G\n\x06parent\x18\x01 \x01(\tB7\xfa\x41\x34\n2bigquerydatatransfer.googleapis.com/TransferConfig\x12o\n\x14requested_time_range\x18\x03 
\x01(\x0b\x32O.google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest.TimeRangeH\x00\x12\x38\n\x12requested_run_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x1ai\n\tTimeRange\x12.\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x06\n\x04time"c\n\x1fStartManualTransferRunsResponse\x12@\n\x04runs\x18\x01 \x03(\x0b\x32\x32.google.cloud.bigquery.datatransfer.v1.TransferRun2\xc3\x1f\n\x13\x44\x61taTransferService\x12\xe6\x01\n\rGetDataSource\x12;.google.cloud.bigquery.datatransfer.v1.GetDataSourceRequest\x1a\x31.google.cloud.bigquery.datatransfer.v1.DataSource"e\x82\xd3\xe4\x93\x02X\x12//v1/{name=projects/*/locations/*/dataSources/*}Z%\x12#/v1/{name=projects/*/dataSources/*}\xda\x41\x04name\x12\xf9\x01\n\x0fListDataSources\x12=.google.cloud.bigquery.datatransfer.v1.ListDataSourcesRequest\x1a>.google.cloud.bigquery.datatransfer.v1.ListDataSourcesResponse"g\x82\xd3\xe4\x93\x02X\x12//v1/{parent=projects/*/locations/*}/dataSourcesZ%\x12#/v1/{parent=projects/*}/dataSources\xda\x41\x06parent\x12\xb6\x02\n\x14\x43reateTransferConfig\x12\x42.google.cloud.bigquery.datatransfer.v1.CreateTransferConfigRequest\x1a\x35.google.cloud.bigquery.datatransfer.v1.TransferConfig"\xa2\x01\x82\xd3\xe4\x93\x02\x82\x01"3/v1/{parent=projects/*/locations/*}/transferConfigs:\x0ftransfer_configZ:"\'/v1/{parent=projects/*}/transferConfigs:\x0ftransfer_config\xda\x41\x16parent,transfer_config\x12\xdb\x02\n\x14UpdateTransferConfig\x12\x42.google.cloud.bigquery.datatransfer.v1.UpdateTransferConfigRequest\x1a\x35.google.cloud.bigquery.datatransfer.v1.TransferConfig"\xc7\x01\x82\xd3\xe4\x93\x02\xa2\x01\x32\x43/v1/{transfer_config.name=projects/*/locations/*/transferConfigs/*}:\x0ftransfer_configZJ27/v1/{transfer_config.name=projects/*/transferConfigs/*}:\x0ftransfer_config\xda\x41\x1btransfer_config,update_mask\x12\xe1\x01\n\x14\x44\x65leteTransferConfig\x12\x42.google.cloud.bigquery
.datatransfer.v1.DeleteTransferConfigRequest\x1a\x16.google.protobuf.Empty"m\x82\xd3\xe4\x93\x02`*3/v1/{name=projects/*/locations/*/transferConfigs/*}Z)*\'/v1/{name=projects/*/transferConfigs/*}\xda\x41\x04name\x12\xfa\x01\n\x11GetTransferConfig\x12?.google.cloud.bigquery.datatransfer.v1.GetTransferConfigRequest\x1a\x35.google.cloud.bigquery.datatransfer.v1.TransferConfig"m\x82\xd3\xe4\x93\x02`\x12\x33/v1/{name=projects/*/locations/*/transferConfigs/*}Z)\x12\'/v1/{name=projects/*/transferConfigs/*}\xda\x41\x04name\x12\x8d\x02\n\x13ListTransferConfigs\x12\x41.google.cloud.bigquery.datatransfer.v1.ListTransferConfigsRequest\x1a\x42.google.cloud.bigquery.datatransfer.v1.ListTransferConfigsResponse"o\x82\xd3\xe4\x93\x02`\x12\x33/v1/{parent=projects/*/locations/*}/transferConfigsZ)\x12\'/v1/{parent=projects/*}/transferConfigs\xda\x41\x06parent\x12\xcd\x02\n\x14ScheduleTransferRuns\x12\x42.google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsRequest\x1a\x43.google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsResponse"\xab\x01\x88\x02\x01\x82\xd3\xe4\x93\x02\x84\x01"B/v1/{parent=projects/*/locations/*/transferConfigs/*}:scheduleRuns:\x01*Z;"6/v1/{parent=projects/*/transferConfigs/*}:scheduleRuns:\x01*\xda\x41\x1aparent,start_time,end_time\x12\xbc\x02\n\x17StartManualTransferRuns\x12\x45.google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest\x1a\x46.google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsResponse"\x91\x01\x82\xd3\xe4\x93\x02\x8a\x01"E/v1/{parent=projects/*/locations/*/transferConfigs/*}:startManualRuns:\x01*Z>"9/v1/{parent=projects/*/transferConfigs/*}:startManualRuns:\x01*\x12\xff\x01\n\x0eGetTransferRun\x12<.google.cloud.bigquery.datatransfer.v1.GetTransferRunRequest\x1a\x32.google.cloud.bigquery.datatransfer.v1.TransferRun"{\x82\xd3\xe4\x93\x02n\x12:/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}Z0\x12./v1/{name=projects/*/transferConfigs/*/runs/*}\xda\x41\x04name\x12\xe9\x01\n\x11\x44\x65leteTransferRun\x12?.goog
le.cloud.bigquery.datatransfer.v1.DeleteTransferRunRequest\x1a\x16.google.protobuf.Empty"{\x82\xd3\xe4\x93\x02n*:/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}Z0*./v1/{name=projects/*/transferConfigs/*/runs/*}\xda\x41\x04name\x12\x92\x02\n\x10ListTransferRuns\x12>.google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest\x1a?.google.cloud.bigquery.datatransfer.v1.ListTransferRunsResponse"}\x82\xd3\xe4\x93\x02n\x12:/v1/{parent=projects/*/locations/*/transferConfigs/*}/runsZ0\x12./v1/{parent=projects/*/transferConfigs/*}/runs\xda\x41\x06parent\x12\xb2\x02\n\x10ListTransferLogs\x12>.google.cloud.bigquery.datatransfer.v1.ListTransferLogsRequest\x1a?.google.cloud.bigquery.datatransfer.v1.ListTransferLogsResponse"\x9c\x01\x82\xd3\xe4\x93\x02\x8c\x01\x12I/v1/{parent=projects/*/locations/*/transferConfigs/*/runs/*}/transferLogsZ?\x12=/v1/{parent=projects/*/transferConfigs/*/runs/*}/transferLogs\xda\x41\x06parent\x12\x9e\x02\n\x0f\x43heckValidCreds\x12=.google.cloud.bigquery.datatransfer.v1.CheckValidCredsRequest\x1a>.google.cloud.bigquery.datatransfer.v1.CheckValidCredsResponse"\x8b\x01\x82\xd3\xe4\x93\x02~"?/v1/{name=projects/*/locations/*/dataSources/*}:checkValidCreds:\x01*Z8"3/v1/{name=projects/*/dataSources/*}:checkValidCreds:\x01*\xda\x41\x04name\x1aW\xca\x41#bigquerydatatransfer.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB\xe3\x01\n)com.google.cloud.bigquery.datatransfer.v1B\x11\x44\x61taTransferProtoP\x01ZQgoogle.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1;datatransfer\xaa\x02%Google.Cloud.BigQuery.DataTransfer.V1\xca\x02%Google\\Cloud\\BigQuery\\DataTransfer\\V1b\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, + google_dot_api_dot_resource__pb2.DESCRIPTOR, google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2.DESCRIPTOR, 
google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, google_dot_protobuf_dot_wrappers__pb2.DESCRIPTOR, - google_dot_api_dot_client__pb2.DESCRIPTOR, ], ) @@ -84,8 +88,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=921, - serialized_end=1026, + serialized_start=981, + serialized_end=1086, ) _sym_db.RegisterEnumDescriptor(_DATASOURCEPARAMETER_TYPE) @@ -119,8 +123,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=1796, - serialized_end=1911, + serialized_start=1861, + serialized_end=1976, ) _sym_db.RegisterEnumDescriptor(_DATASOURCE_AUTHORIZATIONTYPE) @@ -150,8 +154,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=1913, - serialized_end=2012, + serialized_start=1978, + serialized_end=2077, ) _sym_db.RegisterEnumDescriptor(_DATASOURCE_DATAREFRESHTYPE) @@ -174,8 +178,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=3312, - serialized_end=3365, + serialized_start=4008, + serialized_end=4061, ) _sym_db.RegisterEnumDescriptor(_LISTTRANSFERRUNSREQUEST_RUNATTEMPT) @@ -484,8 +488,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=381, - serialized_end=1026, + serialized_start=441, + serialized_end=1086, ) @@ -511,7 +515,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -824,13 +828,15 @@ extensions=[], nested_types=[], enum_types=[_DATASOURCE_AUTHORIZATIONTYPE, _DATASOURCE_DATAREFRESHTYPE], - serialized_options=None, + serialized_options=_b( + "\352A^\n.bigquerydatatransfer.googleapis.com/DataSource\022,projects/{project}/dataSources/{data_source}" + ), is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1029, - serialized_end=2012, + 
serialized_start=1089, + serialized_end=2176, ) @@ -856,7 +862,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A0\n.bigquerydatatransfer.googleapis.com/DataSource" + ), file=DESCRIPTOR, ) ], @@ -868,8 +876,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2014, - serialized_end=2050, + serialized_start=2178, + serialized_end=2270, ) @@ -895,7 +903,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A-\n+cloudresourcemanager.googleapis.com/Project" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -943,8 +953,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2052, - serialized_end=2131, + serialized_start=2273, + serialized_end=2405, ) @@ -988,7 +998,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -1000,8 +1010,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2133, - serialized_end=2256, + serialized_start=2408, + serialized_end=2536, ) @@ -1027,7 +1037,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A#\n!locations.googleapis.com/Location" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1045,7 +1057,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1093,8 +1105,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2259, - serialized_end=2434, + serialized_start=2539, + serialized_end=2762, ) @@ -1120,7 +1132,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), 
_descriptor.FieldDescriptor( @@ -1156,7 +1168,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1186,8 +1198,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2437, - serialized_end=2645, + serialized_start=2765, + serialized_end=2983, ) @@ -1213,7 +1225,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A4\n2bigquerydatatransfer.googleapis.com/TransferConfig" + ), file=DESCRIPTOR, ) ], @@ -1225,8 +1239,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2647, - serialized_end=2687, + serialized_start=2985, + serialized_end=3085, ) @@ -1252,7 +1266,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A4\n2bigquerydatatransfer.googleapis.com/TransferConfig" + ), file=DESCRIPTOR, ) ], @@ -1264,8 +1280,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2689, - serialized_end=2732, + serialized_start=3087, + serialized_end=3190, ) @@ -1291,7 +1307,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A1\n/bigquerydatatransfer.googleapis.com/TransferRun" + ), file=DESCRIPTOR, ) ], @@ -1303,8 +1321,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2734, - serialized_end=2771, + serialized_start=3192, + serialized_end=3286, ) @@ -1330,7 +1348,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A1\n/bigquerydatatransfer.googleapis.com/TransferRun" + ), file=DESCRIPTOR, ) ], @@ -1342,8 +1362,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2773, - serialized_end=2813, + serialized_start=3288, + 
serialized_end=3385, ) @@ -1369,7 +1389,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A-\n+cloudresourcemanager.googleapis.com/Project" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1435,8 +1457,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2815, - serialized_end=2923, + serialized_start=3388, + serialized_end=3549, ) @@ -1462,7 +1484,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1480,7 +1502,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -1492,8 +1514,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2926, - serialized_end=3061, + serialized_start=3552, + serialized_end=3697, ) @@ -1519,7 +1541,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A4\n2bigquerydatatransfer.googleapis.com/TransferConfig" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1603,8 +1627,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3064, - serialized_end=3365, + serialized_start=3700, + serialized_end=4061, ) @@ -1630,7 +1654,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1648,7 +1672,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -1660,8 +1684,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3367, - serialized_end=3493, + serialized_start=4064, + serialized_end=4200, ) @@ -1687,7 +1711,9 @@ 
containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A1\n/bigquerydatatransfer.googleapis.com/TransferRun" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1753,8 +1779,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3496, - serialized_end=3671, + serialized_start=4203, + serialized_end=4435, ) @@ -1780,7 +1806,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1798,7 +1824,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -1810,8 +1836,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3674, - serialized_end=3808, + serialized_start=4438, + serialized_end=4582, ) @@ -1837,7 +1863,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A0\n.bigquerydatatransfer.googleapis.com/DataSource" + ), file=DESCRIPTOR, ) ], @@ -1849,8 +1877,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3810, - serialized_end=3848, + serialized_start=4584, + serialized_end=4678, ) @@ -1888,8 +1916,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3850, - serialized_end=3900, + serialized_start=4680, + serialized_end=4730, ) @@ -1915,7 +1943,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A4\n2bigquerydatatransfer.googleapis.com/TransferConfig" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1933,7 +1963,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1951,7 
+1981,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -1963,8 +1993,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3903, - serialized_end=4042, + serialized_start=4733, + serialized_end=4942, ) @@ -2002,8 +2032,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4044, - serialized_end=4140, + serialized_start=4944, + serialized_end=5040, ) @@ -2059,8 +2089,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4364, - serialized_end=4469, + serialized_start=5321, + serialized_end=5426, ) _STARTMANUALTRANSFERRUNSREQUEST = _descriptor.Descriptor( @@ -2085,7 +2115,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\372A4\n2bigquerydatatransfer.googleapis.com/TransferConfig" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2141,8 +2173,8 @@ fields=[], ) ], - serialized_start=4143, - serialized_end=4477, + serialized_start=5043, + serialized_end=5434, ) @@ -2180,8 +2212,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4479, - serialized_end=4578, + serialized_start=5436, + serialized_end=5535, ) _DATASOURCEPARAMETER.fields_by_name["type"].enum_type = _DATASOURCEPARAMETER_TYPE @@ -2473,8 +2505,8 @@ Attributes: name: - The field will contain name of the resource requested, for - example: + Required. The field will contain name of the resource + requested, for example: ``projects/{project_id}/dataSources/{data_source_id}`` """, # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.GetDataSourceRequest) @@ -2493,8 +2525,9 @@ Attributes: parent: - The BigQuery project id for which data sources should be - returned. Must be in the form: ``projects/{project_id}`` + Required. The BigQuery project id for which data sources + should be returned. 
Must be in the form: + ``projects/{project_id}`` page_token: Pagination token, which can be used to request a specific page of ``ListDataSourcesRequest`` list results. For multiple-page @@ -2549,13 +2582,13 @@ Attributes: parent: - The BigQuery project id where the transfer configuration - should be created. Must be in the format + Required. The BigQuery project id where the transfer + configuration should be created. Must be in the format projects/{project\_id}/locations/{location\_id} If specified location and location of the destination bigquery dataset do not match - the request will fail. transfer_config: - Data transfer configuration to create. + Required. Data transfer configuration to create. authorization_code: Optional OAuth2 authorization code to use with this transfer configuration. This is required if new credentials are needed, @@ -2599,7 +2632,7 @@ Attributes: transfer_config: - Data transfer configuration to create. + Required. Data transfer configuration to create. authorization_code: Optional OAuth2 authorization code to use with this transfer configuration. If it is provided, the transfer configuration @@ -2618,7 +2651,8 @@ with the page text prompting the user to copy the code and paste it in the application. update_mask: - Required list of fields to be updated in this request. + Required. Required list of fields to be updated in this + request. version_info: Optional version info. If users want to find a very recent access token, that is, immediately after approving access, @@ -2644,8 +2678,9 @@ Attributes: name: - The field will contain name of the resource requested, for - example: ``projects/{project_id}/transferConfigs/{config_id}`` + Required. 
The field will contain name of the resource + requested, for example: + ``projects/{project_id}/transferConfigs/{config_id}`` """, # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.GetTransferConfigRequest) ), @@ -2664,8 +2699,9 @@ Attributes: name: - The field will contain name of the resource requested, for - example: ``projects/{project_id}/transferConfigs/{config_id}`` + Required. The field will contain name of the resource + requested, for example: + ``projects/{project_id}/transferConfigs/{config_id}`` """, # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.DeleteTransferConfigRequest) ), @@ -2683,9 +2719,9 @@ Attributes: name: - The field will contain name of the resource requested, for - example: ``projects/{project_id}/transferConfigs/{config_id}/r - uns/{run_id}`` + Required. The field will contain name of the resource + requested, for example: ``projects/{project_id}/transferConfig + s/{config_id}/runs/{run_id}`` """, # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.GetTransferRunRequest) ), @@ -2703,9 +2739,9 @@ Attributes: name: - The field will contain name of the resource requested, for - example: ``projects/{project_id}/transferConfigs/{config_id}/r - uns/{run_id}`` + Required. The field will contain name of the resource + requested, for example: ``projects/{project_id}/transferConfig + s/{config_id}/runs/{run_id}`` """, # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.DeleteTransferRunRequest) ), @@ -2723,8 +2759,8 @@ Attributes: parent: - The BigQuery project id for which data sources should be - returned: ``projects/{project_id}``. + Required. The BigQuery project id for which data sources + should be returned: ``projects/{project_id}``. data_source_ids: When specified, only configurations of requested data sources are returned. 
@@ -2779,9 +2815,10 @@ Attributes: parent: - Name of transfer configuration for which transfer runs should - be retrieved. Format of transfer configuration resource name - is: ``projects/{project_id}/transferConfigs/{config_id}``. + Required. Name of transfer configuration for which transfer + runs should be retrieved. Format of transfer configuration + resource name is: + ``projects/{project_id}/transferConfigs/{config_id}``. states: When specified, only transfer runs with requested states are returned. @@ -2837,8 +2874,8 @@ Attributes: parent: - Transfer run name in the form: ``projects/{project_id}/transfe - rConfigs/{config_Id}/runs/{run_id}``. + Required. Transfer run name in the form: ``projects/{project_i + d}/transferConfigs/{config_Id}/runs/{run_id}``. page_token: Pagination token, which can be used to request a specific page of ``ListTransferLogsRequest`` list results. For multiple-page @@ -2896,7 +2933,7 @@ Attributes: name: - The data source in the form: + Required. The data source in the form: ``projects/{project_id}/dataSources/{data_source_id}`` """, # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.CheckValidCredsRequest) @@ -2933,13 +2970,13 @@ Attributes: parent: - Transfer configuration name in the form: + Required. Transfer configuration name in the form: ``projects/{project_id}/transferConfigs/{config_id}``. start_time: - Start time of the range of transfer runs. For example, - ``"2017-05-25T00:00:00+00:00"``. + Required. Start time of the range of transfer runs. For + example, ``"2017-05-25T00:00:00+00:00"``. end_time: - End time of the range of transfer runs. For example, + Required. End time of the range of transfer runs. For example, ``"2017-05-30T00:00:00+00:00"``. 
""", # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsRequest) @@ -3039,8 +3076,35 @@ DESCRIPTOR._options = None +_DATASOURCE.fields_by_name["name"]._options = None _DATASOURCE.fields_by_name["transfer_type"]._options = None _DATASOURCE.fields_by_name["supports_multiple_transfers"]._options = None +_DATASOURCE._options = None +_GETDATASOURCEREQUEST.fields_by_name["name"]._options = None +_LISTDATASOURCESREQUEST.fields_by_name["parent"]._options = None +_LISTDATASOURCESRESPONSE.fields_by_name["next_page_token"]._options = None +_CREATETRANSFERCONFIGREQUEST.fields_by_name["parent"]._options = None +_CREATETRANSFERCONFIGREQUEST.fields_by_name["transfer_config"]._options = None +_UPDATETRANSFERCONFIGREQUEST.fields_by_name["transfer_config"]._options = None +_UPDATETRANSFERCONFIGREQUEST.fields_by_name["update_mask"]._options = None +_GETTRANSFERCONFIGREQUEST.fields_by_name["name"]._options = None +_DELETETRANSFERCONFIGREQUEST.fields_by_name["name"]._options = None +_GETTRANSFERRUNREQUEST.fields_by_name["name"]._options = None +_DELETETRANSFERRUNREQUEST.fields_by_name["name"]._options = None +_LISTTRANSFERCONFIGSREQUEST.fields_by_name["parent"]._options = None +_LISTTRANSFERCONFIGSRESPONSE.fields_by_name["transfer_configs"]._options = None +_LISTTRANSFERCONFIGSRESPONSE.fields_by_name["next_page_token"]._options = None +_LISTTRANSFERRUNSREQUEST.fields_by_name["parent"]._options = None +_LISTTRANSFERRUNSRESPONSE.fields_by_name["transfer_runs"]._options = None +_LISTTRANSFERRUNSRESPONSE.fields_by_name["next_page_token"]._options = None +_LISTTRANSFERLOGSREQUEST.fields_by_name["parent"]._options = None +_LISTTRANSFERLOGSRESPONSE.fields_by_name["transfer_messages"]._options = None +_LISTTRANSFERLOGSRESPONSE.fields_by_name["next_page_token"]._options = None +_CHECKVALIDCREDSREQUEST.fields_by_name["name"]._options = None +_SCHEDULETRANSFERRUNSREQUEST.fields_by_name["parent"]._options = None 
+_SCHEDULETRANSFERRUNSREQUEST.fields_by_name["start_time"]._options = None +_SCHEDULETRANSFERRUNSREQUEST.fields_by_name["end_time"]._options = None +_STARTMANUALTRANSFERRUNSREQUEST.fields_by_name["parent"]._options = None _DATATRANSFERSERVICE = _descriptor.ServiceDescriptor( name="DataTransferService", @@ -3050,8 +3114,8 @@ serialized_options=_b( "\312A#bigquerydatatransfer.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" ), - serialized_start=4581, - serialized_end=8454, + serialized_start=5538, + serialized_end=9573, methods=[ _descriptor.MethodDescriptor( name="GetDataSource", @@ -3061,7 +3125,7 @@ input_type=_GETDATASOURCEREQUEST, output_type=_DATASOURCE, serialized_options=_b( - "\202\323\344\223\002X\022//v1/{name=projects/*/locations/*/dataSources/*}Z%\022#/v1/{name=projects/*/dataSources/*}" + "\202\323\344\223\002X\022//v1/{name=projects/*/locations/*/dataSources/*}Z%\022#/v1/{name=projects/*/dataSources/*}\332A\004name" ), ), _descriptor.MethodDescriptor( @@ -3072,7 +3136,7 @@ input_type=_LISTDATASOURCESREQUEST, output_type=_LISTDATASOURCESRESPONSE, serialized_options=_b( - "\202\323\344\223\002X\022//v1/{parent=projects/*/locations/*}/dataSourcesZ%\022#/v1/{parent=projects/*}/dataSources" + "\202\323\344\223\002X\022//v1/{parent=projects/*/locations/*}/dataSourcesZ%\022#/v1/{parent=projects/*}/dataSources\332A\006parent" ), ), _descriptor.MethodDescriptor( @@ -3083,7 +3147,7 @@ input_type=_CREATETRANSFERCONFIGREQUEST, output_type=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERCONFIG, serialized_options=_b( - '\202\323\344\223\002\202\001"3/v1/{parent=projects/*/locations/*}/transferConfigs:\017transfer_configZ:"\'/v1/{parent=projects/*}/transferConfigs:\017transfer_config' + '\202\323\344\223\002\202\001"3/v1/{parent=projects/*/locations/*}/transferConfigs:\017transfer_configZ:"\'/v1/{parent=projects/*}/transferConfigs:\017transfer_config\332A\026parent,transfer_config' ), ), 
_descriptor.MethodDescriptor( @@ -3094,7 +3158,7 @@ input_type=_UPDATETRANSFERCONFIGREQUEST, output_type=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERCONFIG, serialized_options=_b( - "\202\323\344\223\002\242\0012C/v1/{transfer_config.name=projects/*/locations/*/transferConfigs/*}:\017transfer_configZJ27/v1/{transfer_config.name=projects/*/transferConfigs/*}:\017transfer_config" + "\202\323\344\223\002\242\0012C/v1/{transfer_config.name=projects/*/locations/*/transferConfigs/*}:\017transfer_configZJ27/v1/{transfer_config.name=projects/*/transferConfigs/*}:\017transfer_config\332A\033transfer_config,update_mask" ), ), _descriptor.MethodDescriptor( @@ -3105,7 +3169,7 @@ input_type=_DELETETRANSFERCONFIGREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, serialized_options=_b( - "\202\323\344\223\002`*3/v1/{name=projects/*/locations/*/transferConfigs/*}Z)*'/v1/{name=projects/*/transferConfigs/*}" + "\202\323\344\223\002`*3/v1/{name=projects/*/locations/*/transferConfigs/*}Z)*'/v1/{name=projects/*/transferConfigs/*}\332A\004name" ), ), _descriptor.MethodDescriptor( @@ -3116,7 +3180,7 @@ input_type=_GETTRANSFERCONFIGREQUEST, output_type=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERCONFIG, serialized_options=_b( - "\202\323\344\223\002`\0223/v1/{name=projects/*/locations/*/transferConfigs/*}Z)\022'/v1/{name=projects/*/transferConfigs/*}" + "\202\323\344\223\002`\0223/v1/{name=projects/*/locations/*/transferConfigs/*}Z)\022'/v1/{name=projects/*/transferConfigs/*}\332A\004name" ), ), _descriptor.MethodDescriptor( @@ -3127,7 +3191,7 @@ input_type=_LISTTRANSFERCONFIGSREQUEST, output_type=_LISTTRANSFERCONFIGSRESPONSE, serialized_options=_b( - "\202\323\344\223\002`\0223/v1/{parent=projects/*/locations/*}/transferConfigsZ)\022'/v1/{parent=projects/*}/transferConfigs" + 
"\202\323\344\223\002`\0223/v1/{parent=projects/*/locations/*}/transferConfigsZ)\022'/v1/{parent=projects/*}/transferConfigs\332A\006parent" ), ), _descriptor.MethodDescriptor( @@ -3138,7 +3202,7 @@ input_type=_SCHEDULETRANSFERRUNSREQUEST, output_type=_SCHEDULETRANSFERRUNSRESPONSE, serialized_options=_b( - '\210\002\001\202\323\344\223\002\204\001"B/v1/{parent=projects/*/locations/*/transferConfigs/*}:scheduleRuns:\001*Z;"6/v1/{parent=projects/*/transferConfigs/*}:scheduleRuns:\001*' + '\210\002\001\202\323\344\223\002\204\001"B/v1/{parent=projects/*/locations/*/transferConfigs/*}:scheduleRuns:\001*Z;"6/v1/{parent=projects/*/transferConfigs/*}:scheduleRuns:\001*\332A\032parent,start_time,end_time' ), ), _descriptor.MethodDescriptor( @@ -3160,7 +3224,7 @@ input_type=_GETTRANSFERRUNREQUEST, output_type=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERRUN, serialized_options=_b( - "\202\323\344\223\002n\022:/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}Z0\022./v1/{name=projects/*/transferConfigs/*/runs/*}" + "\202\323\344\223\002n\022:/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}Z0\022./v1/{name=projects/*/transferConfigs/*/runs/*}\332A\004name" ), ), _descriptor.MethodDescriptor( @@ -3171,7 +3235,7 @@ input_type=_DELETETRANSFERRUNREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, serialized_options=_b( - "\202\323\344\223\002n*:/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}Z0*./v1/{name=projects/*/transferConfigs/*/runs/*}" + "\202\323\344\223\002n*:/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}Z0*./v1/{name=projects/*/transferConfigs/*/runs/*}\332A\004name" ), ), _descriptor.MethodDescriptor( @@ -3182,7 +3246,7 @@ input_type=_LISTTRANSFERRUNSREQUEST, output_type=_LISTTRANSFERRUNSRESPONSE, serialized_options=_b( - "\202\323\344\223\002n\022:/v1/{parent=projects/*/locations/*/transferConfigs/*}/runsZ0\022./v1/{parent=projects/*/transferConfigs/*}/runs" + 
"\202\323\344\223\002n\022:/v1/{parent=projects/*/locations/*/transferConfigs/*}/runsZ0\022./v1/{parent=projects/*/transferConfigs/*}/runs\332A\006parent" ), ), _descriptor.MethodDescriptor( @@ -3193,7 +3257,7 @@ input_type=_LISTTRANSFERLOGSREQUEST, output_type=_LISTTRANSFERLOGSRESPONSE, serialized_options=_b( - "\202\323\344\223\002\214\001\022I/v1/{parent=projects/*/locations/*/transferConfigs/*/runs/*}/transferLogsZ?\022=/v1/{parent=projects/*/transferConfigs/*/runs/*}/transferLogs" + "\202\323\344\223\002\214\001\022I/v1/{parent=projects/*/locations/*/transferConfigs/*/runs/*}/transferLogsZ?\022=/v1/{parent=projects/*/transferConfigs/*/runs/*}/transferLogs\332A\006parent" ), ), _descriptor.MethodDescriptor( @@ -3204,7 +3268,7 @@ input_type=_CHECKVALIDCREDSREQUEST, output_type=_CHECKVALIDCREDSRESPONSE, serialized_options=_b( - '\202\323\344\223\002~"?/v1/{name=projects/*/locations/*/dataSources/*}:checkValidCreds:\001*Z8"3/v1/{name=projects/*/dataSources/*}:checkValidCreds:\001*' + '\202\323\344\223\002~"?/v1/{name=projects/*/locations/*/dataSources/*}:checkValidCreds:\001*Z8"3/v1/{name=projects/*/dataSources/*}:checkValidCreds:\001*\332A\004name' ), ), ], diff --git a/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/transfer.proto b/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/transfer.proto index 9501ea3cc3b3..b0982d286865 100644 --- a/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/transfer.proto +++ b/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/transfer.proto @@ -17,7 +17,8 @@ syntax = "proto3"; package google.cloud.bigquery.datatransfer.v1; -import "google/api/annotations.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; import "google/protobuf/struct.proto"; import "google/protobuf/timestamp.proto"; import "google/rpc/status.proto"; @@ -96,6 +97,11 @@ message ScheduleOptions { // `destination_dataset_id` is created when needed and shared with 
the // appropriate data source service account. message TransferConfig { + option (google.api.resource) = { + type: "bigquerydatatransfer.googleapis.com/TransferConfig" + pattern: "projects/{project}/transferConfigs/{transfer_config}" + }; + // The resource name of the transfer config. // Transfer config names have the form of // `projects/{project_id}/locations/{region}/transferConfigs/{config_id}`. @@ -105,8 +111,11 @@ message TransferConfig { // required, will be generated for config_id. string name = 1; - // The BigQuery target dataset id. - string destination_dataset_id = 2; + // The desination of the transfer config. + oneof destination { + // The BigQuery target dataset id. + string destination_dataset_id = 2; + } // User specified display name for the data transfer. string display_name = 3; @@ -147,23 +156,28 @@ message TransferConfig { bool disabled = 13; // Output only. Data transfer modification time. Ignored by server on input. - google.protobuf.Timestamp update_time = 4; + google.protobuf.Timestamp update_time = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Next time when data transfer will run. - google.protobuf.Timestamp next_run_time = 8; + google.protobuf.Timestamp next_run_time = 8 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. State of the most recently updated transfer run. - TransferState state = 10; + TransferState state = 10 [(google.api.field_behavior) = OUTPUT_ONLY]; // Deprecated. Unique ID of the user on whose behalf transfer is done. int64 user_id = 11; // Output only. Region in which BigQuery dataset is located. - string dataset_region = 14; + string dataset_region = 14 [(google.api.field_behavior) = OUTPUT_ONLY]; } // Represents a data transfer run. 
message TransferRun { + option (google.api.resource) = { + type: "bigquerydatatransfer.googleapis.com/TransferRun" + pattern: "projects/{project}/locations/{location}/transferConfigs/{transfer_config}/runs/{run}" + }; + // The resource name of the transfer run. // Transfer run names have the form // `projects/{project_id}/locations/{location}/transferConfigs/{config_id}/runs/{run_id}`. @@ -182,23 +196,26 @@ message TransferRun { // Output only. Time when transfer run was started. // Parameter ignored by server for input requests. - google.protobuf.Timestamp start_time = 4; + google.protobuf.Timestamp start_time = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Time when transfer run ended. // Parameter ignored by server for input requests. - google.protobuf.Timestamp end_time = 5; + google.protobuf.Timestamp end_time = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Last time the data transfer run state was updated. - google.protobuf.Timestamp update_time = 6; + google.protobuf.Timestamp update_time = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Data transfer specific parameters. - google.protobuf.Struct params = 9; + google.protobuf.Struct params = 9 [(google.api.field_behavior) = OUTPUT_ONLY]; - // Output only. The BigQuery target dataset id. - string destination_dataset_id = 2; + // Data transfer destination. + oneof destination { + // Output only. The BigQuery target dataset id. + string destination_dataset_id = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; + } // Output only. Data source id. - string data_source_id = 7; + string data_source_id = 7 [(google.api.field_behavior) = OUTPUT_ONLY]; // Data transfer run state. Ignored for input requests. TransferState state = 8; @@ -211,7 +228,7 @@ message TransferRun { // scheduled manually, this is empty. // NOTE: the system might choose to delay the schedule depending on the // current load, so `schedule_time` doesn't always match this. 
- string schedule = 12; + string schedule = 12 [(google.api.field_behavior) = OUTPUT_ONLY]; } // Represents a user facing message for a particular data transfer run. diff --git a/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/transfer_pb2.py b/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/transfer_pb2.py index 437774a5f201..64927cc95851 100644 --- a/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/transfer_pb2.py +++ b/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/transfer_pb2.py @@ -16,7 +16,8 @@ _sym_db = _symbol_database.Default() -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2 @@ -30,10 +31,11 @@ "\n)com.google.cloud.bigquery.datatransfer.v1B\rTransferProtoP\001ZQgoogle.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1;datatransfer\242\002\005GCBDT\252\002%Google.Cloud.BigQuery.DataTransfer.V1\312\002%Google\\Cloud\\BigQuery\\DataTransfer\\V1" ), serialized_pb=_b( - '\n:google/cloud/bigquery/datatransfer_v1/proto/transfer.proto\x12%google.cloud.bigquery.datatransfer.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto"\x90\x01\n\x0fScheduleOptions\x12\x1f\n\x17\x64isable_auto_scheduling\x18\x03 \x01(\x08\x12.\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"\xff\x03\n\x0eTransferConfig\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x1e\n\x16\x64\x65stination_dataset_id\x18\x02 
\x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x03 \x01(\t\x12\x16\n\x0e\x64\x61ta_source_id\x18\x05 \x01(\t\x12\'\n\x06params\x18\t \x01(\x0b\x32\x17.google.protobuf.Struct\x12\x10\n\x08schedule\x18\x07 \x01(\t\x12P\n\x10schedule_options\x18\x18 \x01(\x0b\x32\x36.google.cloud.bigquery.datatransfer.v1.ScheduleOptions\x12 \n\x18\x64\x61ta_refresh_window_days\x18\x0c \x01(\x05\x12\x10\n\x08\x64isabled\x18\r \x01(\x08\x12/\n\x0bupdate_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x31\n\rnext_run_time\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x43\n\x05state\x18\n \x01(\x0e\x32\x34.google.cloud.bigquery.datatransfer.v1.TransferState\x12\x0f\n\x07user_id\x18\x0b \x01(\x03\x12\x16\n\x0e\x64\x61taset_region\x18\x0e \x01(\t"\xfe\x03\n\x0bTransferRun\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x31\n\rschedule_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08run_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12(\n\x0c\x65rror_status\x18\x15 \x01(\x0b\x32\x12.google.rpc.Status\x12.\n\nstart_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\'\n\x06params\x18\t \x01(\x0b\x32\x17.google.protobuf.Struct\x12\x1e\n\x16\x64\x65stination_dataset_id\x18\x02 \x01(\t\x12\x16\n\x0e\x64\x61ta_source_id\x18\x07 \x01(\t\x12\x43\n\x05state\x18\x08 \x01(\x0e\x32\x34.google.cloud.bigquery.datatransfer.v1.TransferState\x12\x0f\n\x07user_id\x18\x0b \x01(\x03\x12\x10\n\x08schedule\x18\x0c \x01(\t"\x8a\x02\n\x0fTransferMessage\x12\x30\n\x0cmessage_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12X\n\x08severity\x18\x02 \x01(\x0e\x32\x46.google.cloud.bigquery.datatransfer.v1.TransferMessage.MessageSeverity\x12\x14\n\x0cmessage_text\x18\x03 \x01(\t"U\n\x0fMessageSeverity\x12 
\n\x1cMESSAGE_SEVERITY_UNSPECIFIED\x10\x00\x12\x08\n\x04INFO\x10\x01\x12\x0b\n\x07WARNING\x10\x02\x12\t\n\x05\x45RROR\x10\x03*K\n\x0cTransferType\x12\x1d\n\x19TRANSFER_TYPE_UNSPECIFIED\x10\x00\x12\t\n\x05\x42\x41TCH\x10\x01\x12\r\n\tSTREAMING\x10\x02\x1a\x02\x18\x01*s\n\rTransferState\x12\x1e\n\x1aTRANSFER_STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07PENDING\x10\x02\x12\x0b\n\x07RUNNING\x10\x03\x12\r\n\tSUCCEEDED\x10\x04\x12\n\n\x06\x46\x41ILED\x10\x05\x12\r\n\tCANCELLED\x10\x06\x42\xe7\x01\n)com.google.cloud.bigquery.datatransfer.v1B\rTransferProtoP\x01ZQgoogle.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1;datatransfer\xa2\x02\x05GCBDT\xaa\x02%Google.Cloud.BigQuery.DataTransfer.V1\xca\x02%Google\\Cloud\\BigQuery\\DataTransfer\\V1b\x06proto3' + '\n:google/cloud/bigquery/datatransfer_v1/proto/transfer.proto\x12%google.cloud.bigquery.datatransfer.v1\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto"\x90\x01\n\x0fScheduleOptions\x12\x1f\n\x17\x64isable_auto_scheduling\x18\x03 \x01(\x08\x12.\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"\x93\x05\n\x0eTransferConfig\x12\x0c\n\x04name\x18\x01 \x01(\t\x12 \n\x16\x64\x65stination_dataset_id\x18\x02 \x01(\tH\x00\x12\x14\n\x0c\x64isplay_name\x18\x03 \x01(\t\x12\x16\n\x0e\x64\x61ta_source_id\x18\x05 \x01(\t\x12\'\n\x06params\x18\t \x01(\x0b\x32\x17.google.protobuf.Struct\x12\x10\n\x08schedule\x18\x07 \x01(\t\x12P\n\x10schedule_options\x18\x18 \x01(\x0b\x32\x36.google.cloud.bigquery.datatransfer.v1.ScheduleOptions\x12 \n\x18\x64\x61ta_refresh_window_days\x18\x0c \x01(\x05\x12\x10\n\x08\x64isabled\x18\r \x01(\x08\x12\x34\n\x0bupdate_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x36\n\rnext_run_time\x18\x08 
\x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12H\n\x05state\x18\n \x01(\x0e\x32\x34.google.cloud.bigquery.datatransfer.v1.TransferStateB\x03\xe0\x41\x03\x12\x0f\n\x07user_id\x18\x0b \x01(\x03\x12\x1b\n\x0e\x64\x61taset_region\x18\x0e \x01(\tB\x03\xe0\x41\x03:m\xea\x41j\n2bigquerydatatransfer.googleapis.com/TransferConfig\x12\x34projects/{project}/transferConfigs/{transfer_config}B\r\n\x0b\x64\x65stination"\xc0\x05\n\x0bTransferRun\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x31\n\rschedule_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08run_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12(\n\x0c\x65rror_status\x18\x15 \x01(\x0b\x32\x12.google.rpc.Status\x12\x33\n\nstart_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x31\n\x08\x65nd_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x34\n\x0bupdate_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12,\n\x06params\x18\t \x01(\x0b\x32\x17.google.protobuf.StructB\x03\xe0\x41\x03\x12%\n\x16\x64\x65stination_dataset_id\x18\x02 \x01(\tB\x03\xe0\x41\x03H\x00\x12\x1b\n\x0e\x64\x61ta_source_id\x18\x07 \x01(\tB\x03\xe0\x41\x03\x12\x43\n\x05state\x18\x08 \x01(\x0e\x32\x34.google.cloud.bigquery.datatransfer.v1.TransferState\x12\x0f\n\x07user_id\x18\x0b \x01(\x03\x12\x15\n\x08schedule\x18\x0c \x01(\tB\x03\xe0\x41\x03:\x8b\x01\xea\x41\x87\x01\n/bigquerydatatransfer.googleapis.com/TransferRun\x12Tprojects/{project}/locations/{location}/transferConfigs/{transfer_config}/runs/{run}B\r\n\x0b\x64\x65stination"\x8a\x02\n\x0fTransferMessage\x12\x30\n\x0cmessage_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12X\n\x08severity\x18\x02 \x01(\x0e\x32\x46.google.cloud.bigquery.datatransfer.v1.TransferMessage.MessageSeverity\x12\x14\n\x0cmessage_text\x18\x03 \x01(\t"U\n\x0fMessageSeverity\x12 
\n\x1cMESSAGE_SEVERITY_UNSPECIFIED\x10\x00\x12\x08\n\x04INFO\x10\x01\x12\x0b\n\x07WARNING\x10\x02\x12\t\n\x05\x45RROR\x10\x03*K\n\x0cTransferType\x12\x1d\n\x19TRANSFER_TYPE_UNSPECIFIED\x10\x00\x12\t\n\x05\x42\x41TCH\x10\x01\x12\r\n\tSTREAMING\x10\x02\x1a\x02\x18\x01*s\n\rTransferState\x12\x1e\n\x1aTRANSFER_STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07PENDING\x10\x02\x12\x0b\n\x07RUNNING\x10\x03\x12\r\n\tSUCCEEDED\x10\x04\x12\n\n\x06\x46\x41ILED\x10\x05\x12\r\n\tCANCELLED\x10\x06\x42\xe7\x01\n)com.google.cloud.bigquery.datatransfer.v1B\rTransferProtoP\x01ZQgoogle.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1;datatransfer\xa2\x02\x05GCBDT\xaa\x02%Google.Cloud.BigQuery.DataTransfer.V1\xca\x02%Google\\Cloud\\BigQuery\\DataTransfer\\V1b\x06proto3' ), dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, + google_dot_api_dot_resource__pb2.DESCRIPTOR, google_dot_protobuf_dot_struct__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, google_dot_rpc_dot_status__pb2.DESCRIPTOR, @@ -62,8 +64,8 @@ ], containing_type=None, serialized_options=_b("\030\001"), - serialized_start=1662, - serialized_end=1737, + serialized_start=2034, + serialized_end=2109, ) _sym_db.RegisterEnumDescriptor(_TRANSFERTYPE) @@ -99,8 +101,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=1739, - serialized_end=1854, + serialized_start=2111, + serialized_end=2226, ) _sym_db.RegisterEnumDescriptor(_TRANSFERSTATE) @@ -141,8 +143,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=1575, - serialized_end=1660, + serialized_start=1947, + serialized_end=2032, ) _sym_db.RegisterEnumDescriptor(_TRANSFERMESSAGE_MESSAGESEVERITY) @@ -217,8 +219,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=220, - serialized_end=364, + serialized_start=250, + serialized_end=394, ) @@ -406,7 +408,7 @@ containing_type=None, is_extension=False, extension_scope=None, - 
serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -424,7 +426,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -442,7 +444,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -478,20 +480,30 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - serialized_options=None, + serialized_options=_b( + "\352Aj\n2bigquerydatatransfer.googleapis.com/TransferConfig\0224projects/{project}/transferConfigs/{transfer_config}" + ), is_extendable=False, syntax="proto3", extension_ranges=[], - oneofs=[], - serialized_start=367, - serialized_end=878, + oneofs=[ + _descriptor.OneofDescriptor( + name="destination", + full_name="google.cloud.bigquery.datatransfer.v1.TransferConfig.destination", + index=0, + containing_type=None, + fields=[], + ) + ], + serialized_start=397, + serialized_end=1056, ) @@ -589,7 +601,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -607,7 +619,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -625,7 +637,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -643,7 +655,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + 
serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -661,7 +673,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -679,7 +691,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -733,20 +745,30 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - serialized_options=None, + serialized_options=_b( + "\352A\207\001\n/bigquerydatatransfer.googleapis.com/TransferRun\022Tprojects/{project}/locations/{location}/transferConfigs/{transfer_config}/runs/{run}" + ), is_extendable=False, syntax="proto3", extension_ranges=[], - oneofs=[], - serialized_start=881, - serialized_end=1391, + oneofs=[ + _descriptor.OneofDescriptor( + name="destination", + full_name="google.cloud.bigquery.datatransfer.v1.TransferRun.destination", + index=0, + containing_type=None, + fields=[], + ) + ], + serialized_start=1059, + serialized_end=1763, ) @@ -820,8 +842,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1394, - serialized_end=1660, + serialized_start=1766, + serialized_end=2032, ) _SCHEDULEOPTIONS.fields_by_name[ @@ -841,6 +863,12 @@ "next_run_time" ].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP _TRANSFERCONFIG.fields_by_name["state"].enum_type = _TRANSFERSTATE +_TRANSFERCONFIG.oneofs_by_name["destination"].fields.append( + _TRANSFERCONFIG.fields_by_name["destination_dataset_id"] +) +_TRANSFERCONFIG.fields_by_name[ + "destination_dataset_id" +].containing_oneof = _TRANSFERCONFIG.oneofs_by_name["destination"] _TRANSFERRUN.fields_by_name[ "schedule_time" ].message_type = 
google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP @@ -863,6 +891,12 @@ "params" ].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT _TRANSFERRUN.fields_by_name["state"].enum_type = _TRANSFERSTATE +_TRANSFERRUN.oneofs_by_name["destination"].fields.append( + _TRANSFERRUN.fields_by_name["destination_dataset_id"] +) +_TRANSFERRUN.fields_by_name[ + "destination_dataset_id" +].containing_oneof = _TRANSFERRUN.oneofs_by_name["destination"] _TRANSFERMESSAGE.fields_by_name[ "message_time" ].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP @@ -934,6 +968,8 @@ If config\_id is not provided, usually a uuid, even though it is not guaranteed or required, will be generated for config\_id. + destination: + The desination of the transfer config. destination_dataset_id: The BigQuery target dataset id. display_name: @@ -1017,6 +1053,8 @@ updated. params: Output only. Data transfer specific parameters. + destination: + Data transfer destination. destination_dataset_id: Output only. The BigQuery target dataset id. 
data_source_id: @@ -1063,4 +1101,17 @@ DESCRIPTOR._options = None _TRANSFERTYPE._options = None +_TRANSFERCONFIG.fields_by_name["update_time"]._options = None +_TRANSFERCONFIG.fields_by_name["next_run_time"]._options = None +_TRANSFERCONFIG.fields_by_name["state"]._options = None +_TRANSFERCONFIG.fields_by_name["dataset_region"]._options = None +_TRANSFERCONFIG._options = None +_TRANSFERRUN.fields_by_name["start_time"]._options = None +_TRANSFERRUN.fields_by_name["end_time"]._options = None +_TRANSFERRUN.fields_by_name["update_time"]._options = None +_TRANSFERRUN.fields_by_name["params"]._options = None +_TRANSFERRUN.fields_by_name["destination_dataset_id"]._options = None +_TRANSFERRUN.fields_by_name["data_source_id"]._options = None +_TRANSFERRUN.fields_by_name["schedule"]._options = None +_TRANSFERRUN._options = None # @@protoc_insertion_point(module_scope) diff --git a/bigquery_datatransfer/synth.metadata b/bigquery_datatransfer/synth.metadata index 1643741ae8a9..8ab46cb47b0b 100644 --- a/bigquery_datatransfer/synth.metadata +++ b/bigquery_datatransfer/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-08-03T12:11:16.935211Z", + "updateTime": "2019-09-27T12:12:10.005793Z", "sources": [ { "generator": { "name": "artman", - "version": "0.32.1", - "dockerImage": "googleapis/artman@sha256:a684d40ba9a4e15946f5f2ca6b4bd9fe301192f522e9de4fff622118775f309b" + "version": "0.37.1", + "dockerImage": "googleapis/artman@sha256:6068f67900a3f0bdece596b97bda8fc70406ca0e137a941f4c81d3217c994a80" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "7b212a8d2319cd81a7b6942c25dbf4550480a06c", - "internalRef": "261339454" + "sha": "cd112d8d255e0099df053643d4bd12c228ef7b1b", + "internalRef": "271468707" } }, { diff --git a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/gapic/big_query_storage_client.py b/bigquery_storage/google/cloud/bigquery_storage_v1beta1/gapic/big_query_storage_client.py index 
e44e74830031..95e08647313f 100644 --- a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/gapic/big_query_storage_client.py +++ b/bigquery_storage/google/cloud/bigquery_storage_v1beta1/gapic/big_query_storage_client.py @@ -241,18 +241,18 @@ def create_read_session( parent (str): Required. String of the form ``projects/{project_id}`` indicating the project this ReadSession is associated with. This is the project that will be billed for usage. - table_modifiers (Union[dict, ~google.cloud.bigquery_storage_v1beta1.types.TableModifiers]): Optional. Any modifiers to the Table (e.g. snapshot timestamp). + table_modifiers (Union[dict, ~google.cloud.bigquery_storage_v1beta1.types.TableModifiers]): Any modifiers to the Table (e.g. snapshot timestamp). If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.bigquery_storage_v1beta1.types.TableModifiers` - requested_streams (int): Optional. Initial number of streams. If unset or 0, we will + requested_streams (int): Initial number of streams. If unset or 0, we will provide a value of streams so as to produce reasonable throughput. Must be non-negative. The number of streams may be lower than the requested number, depending on the amount parallelism that is reasonable for the table and the maximum amount of parallelism allowed by the system. Streams must be read starting from offset 0. - read_options (Union[dict, ~google.cloud.bigquery_storage_v1beta1.types.TableReadOptions]): Optional. Read options for this session (e.g. column selection, filters). + read_options (Union[dict, ~google.cloud.bigquery_storage_v1beta1.types.TableReadOptions]): Read options for this session (e.g. column selection, filters). 
If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.bigquery_storage_v1beta1.types.TableReadOptions` diff --git a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/gapic/transports/big_query_storage_grpc_transport.py b/bigquery_storage/google/cloud/bigquery_storage_v1beta1/gapic/transports/big_query_storage_grpc_transport.py index bf3b8b6f5b24..d799b3c21edf 100644 --- a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/gapic/transports/big_query_storage_grpc_transport.py +++ b/bigquery_storage/google/cloud/bigquery_storage_v1beta1/gapic/transports/big_query_storage_grpc_transport.py @@ -33,6 +33,7 @@ class BigQueryStorageGrpcTransport(object): # in this service. _OAUTH_SCOPES = ( "https://www.googleapis.com/auth/bigquery", + "https://www.googleapis.com/auth/bigquery.readonly", "https://www.googleapis.com/auth/cloud-platform", ) diff --git a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/storage.proto b/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/storage.proto index fa2de616d4f7..22f742fbb654 100644 --- a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/storage.proto +++ b/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/storage.proto @@ -18,13 +18,15 @@ syntax = "proto3"; package google.cloud.bigquery.storage.v1beta1; import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; import "google/cloud/bigquery/storage/v1beta1/arrow.proto"; import "google/cloud/bigquery/storage/v1beta1/avro.proto"; import "google/cloud/bigquery/storage/v1beta1/read_options.proto"; import "google/cloud/bigquery/storage/v1beta1/table_reference.proto"; import "google/protobuf/empty.proto"; import "google/protobuf/timestamp.proto"; -import "google/api/client.proto"; option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storage"; option java_package = 
"com.google.cloud.bigquery.storage.v1beta1"; @@ -36,6 +38,7 @@ service BigQueryStorage { option (google.api.default_host) = "bigquerystorage.googleapis.com"; option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/bigquery," + "https://www.googleapis.com/auth/bigquery.readonly," "https://www.googleapis.com/auth/cloud-platform"; // Creates a new read session. A read session divides the contents of a @@ -59,6 +62,7 @@ service BigQueryStorage { body: "*" } }; + option (google.api.method_signature) = "table_reference,parent,requested_streams"; } // Reads rows from the table in the format prescribed by the read session. @@ -74,6 +78,7 @@ service BigQueryStorage { option (google.api.http) = { get: "/v1beta1/{read_position.stream.name=projects/*/streams/*}" }; + option (google.api.method_signature) = "read_position"; } // Creates additional streams for a ReadSession. This API can be used to @@ -84,6 +89,7 @@ service BigQueryStorage { post: "/v1beta1/{session.name=projects/*/sessions/*}" body: "*" }; + option (google.api.method_signature) = "session,requested_streams"; } // Triggers the graceful termination of a single stream in a ReadSession. This @@ -105,6 +111,7 @@ service BigQueryStorage { post: "/v1beta1/{stream.name=projects/*/streams/*}" body: "*" }; + option (google.api.method_signature) = "stream"; } // Splits a given read stream into two Streams. These streams are referred to @@ -124,11 +131,17 @@ service BigQueryStorage { option (google.api.http) = { get: "/v1beta1/{original_stream.name=projects/*/streams/*}" }; + option (google.api.method_signature) = "original_stream"; } } // Information about a single data stream within a read session. message Stream { + option (google.api.resource) = { + type: "bigquerystorage.googleapis.com/Stream" + pattern: "projects/{project}/locations/{location}/streams/{stream}" + }; + // Name of the stream, in the form // `projects/{project_id}/locations/{location}/streams/{stream_id}`. 
string name = 1; @@ -145,6 +158,11 @@ message StreamPosition { // Information returned from a `CreateReadSession` request. message ReadSession { + option (google.api.resource) = { + type: "bigquerystorage.googleapis.com/ReadSession" + pattern: "projects/{project}/locations/{location}/sessions/{session}" + }; + // Unique identifier for the session, in the form // `projects/{project_id}/locations/{location}/sessions/{session_id}`. string name = 1; @@ -181,17 +199,17 @@ message ReadSession { // requested parallelism, projection filters and constraints. message CreateReadSessionRequest { // Required. Reference to the table to read. - TableReference table_reference = 1; + TableReference table_reference = 1 [(google.api.field_behavior) = REQUIRED]; // Required. String of the form `projects/{project_id}` indicating the // project this ReadSession is associated with. This is the project that will // be billed for usage. - string parent = 6; + string parent = 6 [(google.api.field_behavior) = REQUIRED]; - // Optional. Any modifiers to the Table (e.g. snapshot timestamp). + // Any modifiers to the Table (e.g. snapshot timestamp). TableModifiers table_modifiers = 2; - // Optional. Initial number of streams. If unset or 0, we will + // Initial number of streams. If unset or 0, we will // provide a value of streams so as to produce reasonable throughput. Must be // non-negative. The number of streams may be lower than the requested number, // depending on the amount parallelism that is reasonable for the table and @@ -200,7 +218,7 @@ message CreateReadSessionRequest { // Streams must be read starting from offset 0. int32 requested_streams = 3; - // Optional. Read options for this session (e.g. column selection, filters). + // Read options for this session (e.g. column selection, filters). TableReadOptions read_options = 4; // Data output format. Currently default to Avro. @@ -248,7 +266,7 @@ message ReadRowsRequest { // Required. 
Identifier of the position in the stream to start reading from. // The offset requested must be less than the last row read from ReadRows. // Requesting a larger offset is undefined. - StreamPosition read_position = 1; + StreamPosition read_position = 1 [(google.api.field_behavior) = REQUIRED]; } // Progress information for a given Stream. @@ -267,6 +285,12 @@ message StreamStatus { // sharding strategy. float fraction_consumed = 2; + // Represents the progress of the current stream. + // + // Note: This value is under development and should not be used. Use + // `fraction_consumed` instead. + Progress progress = 4; + // Whether this stream can be split. For sessions that use the LIQUID sharding // strategy, this value is always false. For BALANCED sessions, this value is // false when enough data have been read such that no more splits are possible @@ -275,6 +299,25 @@ message StreamStatus { bool is_splittable = 3; } +message Progress { + // The fraction of rows assigned to the stream that have been processed by the + // server so far, not including the rows in the current response message. + // + // This value, along with `at_response_end`, can be used to interpolate the + // progress made as the rows in the message are being processed using the + // following formula: `at_response_start + (at_response_end - + // at_response_start) * rows_processed_from_response / rows_in_response`. + // + // Note that if a filter is provided, the `at_response_end` value of the + // previous response may not necessarily be equal to the `at_response_start` + // value of the current response. + float at_response_start = 1; + + // Similar to `at_response_start`, except that this value includes the rows in + // the current response. + float at_response_end = 2; +} + // Information on if the current connection is being throttled. message ThrottleStatus { // How much this connection is being throttled. 
@@ -313,12 +356,12 @@ message ReadRowsResponse { message BatchCreateReadSessionStreamsRequest { // Required. Must be a non-expired session obtained from a call to // CreateReadSession. Only the name field needs to be set. - ReadSession session = 1; + ReadSession session = 1 [(google.api.field_behavior) = REQUIRED]; // Required. Number of new streams requested. Must be positive. // Number of added streams may be less than this, see CreateReadSessionRequest // for more information. - int32 requested_streams = 2; + int32 requested_streams = 2 [(google.api.field_behavior) = REQUIRED]; } // The response from `BatchCreateReadSessionStreams` returns the stream diff --git a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/storage_pb2.py b/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/storage_pb2.py index ec546f3f9e81..500d277c6cf5 100644 --- a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/storage_pb2.py +++ b/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/storage_pb2.py @@ -17,6 +17,9 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.cloud.bigquery_storage_v1beta1.proto import ( arrow_pb2 as google_dot_cloud_dot_bigquery_dot_storage__v1beta1_dot_proto_dot_arrow__pb2, ) @@ -31,7 +34,6 @@ ) from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from google.api import client_pb2 as google_dot_api_dot_client__pb2 DESCRIPTOR = _descriptor.FileDescriptor( @@ -42,17 +44,19 @@ "\n)com.google.cloud.bigquery.storage.v1beta1ZLgoogle.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storage" ), serialized_pb=_b( - 
'\n9google/cloud/bigquery/storage_v1beta1/proto/storage.proto\x12%google.cloud.bigquery.storage.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x37google/cloud/bigquery/storage_v1beta1/proto/arrow.proto\x1a\x36google/cloud/bigquery/storage_v1beta1/proto/avro.proto\x1a>google/cloud/bigquery/storage_v1beta1/proto/read_options.proto\x1a\x41google/cloud/bigquery/storage_v1beta1/proto/table_reference.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/api/client.proto"\x16\n\x06Stream\x12\x0c\n\x04name\x18\x01 \x01(\t"_\n\x0eStreamPosition\x12=\n\x06stream\x18\x01 \x01(\x0b\x32-.google.cloud.bigquery.storage.v1beta1.Stream\x12\x0e\n\x06offset\x18\x02 \x01(\x03"\xa0\x04\n\x0bReadSession\x12\x0c\n\x04name\x18\x01 \x01(\t\x12/\n\x0b\x65xpire_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12H\n\x0b\x61vro_schema\x18\x05 \x01(\x0b\x32\x31.google.cloud.bigquery.storage.v1beta1.AvroSchemaH\x00\x12J\n\x0c\x61rrow_schema\x18\x06 \x01(\x0b\x32\x32.google.cloud.bigquery.storage.v1beta1.ArrowSchemaH\x00\x12>\n\x07streams\x18\x04 \x03(\x0b\x32-.google.cloud.bigquery.storage.v1beta1.Stream\x12N\n\x0ftable_reference\x18\x07 \x01(\x0b\x32\x35.google.cloud.bigquery.storage.v1beta1.TableReference\x12N\n\x0ftable_modifiers\x18\x08 \x01(\x0b\x32\x35.google.cloud.bigquery.storage.v1beta1.TableModifiers\x12R\n\x11sharding_strategy\x18\t \x01(\x0e\x32\x37.google.cloud.bigquery.storage.v1beta1.ShardingStrategyB\x08\n\x06schema"\xcb\x03\n\x18\x43reateReadSessionRequest\x12N\n\x0ftable_reference\x18\x01 \x01(\x0b\x32\x35.google.cloud.bigquery.storage.v1beta1.TableReference\x12\x0e\n\x06parent\x18\x06 \x01(\t\x12N\n\x0ftable_modifiers\x18\x02 \x01(\x0b\x32\x35.google.cloud.bigquery.storage.v1beta1.TableModifiers\x12\x19\n\x11requested_streams\x18\x03 \x01(\x05\x12M\n\x0cread_options\x18\x04 \x01(\x0b\x32\x37.google.cloud.bigquery.storage.v1beta1.TableReadOptions\x12\x41\n\x06\x66ormat\x18\x05 
\x01(\x0e\x32\x31.google.cloud.bigquery.storage.v1beta1.DataFormat\x12R\n\x11sharding_strategy\x18\x07 \x01(\x0e\x32\x37.google.cloud.bigquery.storage.v1beta1.ShardingStrategy"_\n\x0fReadRowsRequest\x12L\n\rread_position\x18\x01 \x01(\x0b\x32\x35.google.cloud.bigquery.storage.v1beta1.StreamPosition"]\n\x0cStreamStatus\x12\x1b\n\x13\x65stimated_row_count\x18\x01 \x01(\x03\x12\x19\n\x11\x66raction_consumed\x18\x02 \x01(\x02\x12\x15\n\ris_splittable\x18\x03 \x01(\x08"*\n\x0eThrottleStatus\x12\x18\n\x10throttle_percent\x18\x01 \x01(\x05"\xdf\x02\n\x10ReadRowsResponse\x12\x44\n\tavro_rows\x18\x03 \x01(\x0b\x32/.google.cloud.bigquery.storage.v1beta1.AvroRowsH\x00\x12U\n\x12\x61rrow_record_batch\x18\x04 \x01(\x0b\x32\x37.google.cloud.bigquery.storage.v1beta1.ArrowRecordBatchH\x00\x12\x11\n\trow_count\x18\x06 \x01(\x03\x12\x43\n\x06status\x18\x02 \x01(\x0b\x32\x33.google.cloud.bigquery.storage.v1beta1.StreamStatus\x12N\n\x0fthrottle_status\x18\x05 \x01(\x0b\x32\x35.google.cloud.bigquery.storage.v1beta1.ThrottleStatusB\x06\n\x04rows"\x86\x01\n$BatchCreateReadSessionStreamsRequest\x12\x43\n\x07session\x18\x01 \x01(\x0b\x32\x32.google.cloud.bigquery.storage.v1beta1.ReadSession\x12\x19\n\x11requested_streams\x18\x02 \x01(\x05"g\n%BatchCreateReadSessionStreamsResponse\x12>\n\x07streams\x18\x01 \x03(\x0b\x32-.google.cloud.bigquery.storage.v1beta1.Stream"V\n\x15\x46inalizeStreamRequest\x12=\n\x06stream\x18\x02 \x01(\x0b\x32-.google.cloud.bigquery.storage.v1beta1.Stream"r\n\x16SplitReadStreamRequest\x12\x46\n\x0foriginal_stream\x18\x01 \x01(\x0b\x32-.google.cloud.bigquery.storage.v1beta1.Stream\x12\x10\n\x08\x66raction\x18\x02 \x01(\x02"\xa9\x01\n\x17SplitReadStreamResponse\x12\x45\n\x0eprimary_stream\x18\x01 \x01(\x0b\x32-.google.cloud.bigquery.storage.v1beta1.Stream\x12G\n\x10remainder_stream\x18\x02 
\x01(\x0b\x32-.google.cloud.bigquery.storage.v1beta1.Stream*>\n\nDataFormat\x12\x1b\n\x17\x44\x41TA_FORMAT_UNSPECIFIED\x10\x00\x12\x08\n\x04\x41VRO\x10\x01\x12\t\n\x05\x41RROW\x10\x03*O\n\x10ShardingStrategy\x12!\n\x1dSHARDING_STRATEGY_UNSPECIFIED\x10\x00\x12\n\n\x06LIQUID\x10\x01\x12\x0c\n\x08\x42\x41LANCED\x10\x02\x32\xc4\t\n\x0f\x42igQueryStorage\x12\x87\x02\n\x11\x43reateReadSession\x12?.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest\x1a\x32.google.cloud.bigquery.storage.v1beta1.ReadSession"}\x82\xd3\xe4\x93\x02w"0/v1beta1/{table_reference.project_id=projects/*}:\x01*Z@";/v1beta1/{table_reference.dataset_id=projects/*/datasets/*}:\x01*\x12\xc0\x01\n\x08ReadRows\x12\x36.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest\x1a\x37.google.cloud.bigquery.storage.v1beta1.ReadRowsResponse"A\x82\xd3\xe4\x93\x02;\x12\x39/v1beta1/{read_position.stream.name=projects/*/streams/*}0\x01\x12\xf4\x01\n\x1d\x42\x61tchCreateReadSessionStreams\x12K.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest\x1aL.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse"8\x82\xd3\xe4\x93\x02\x32"-/v1beta1/{session.name=projects/*/sessions/*}:\x01*\x12\x9e\x01\n\x0e\x46inalizeStream\x12<.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest\x1a\x16.google.protobuf.Empty"6\x82\xd3\xe4\x93\x02\x30"+/v1beta1/{stream.name=projects/*/streams/*}:\x01*\x12\xce\x01\n\x0fSplitReadStream\x12=.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest\x1a>.google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse"<\x82\xd3\xe4\x93\x02\x36\x12\x34/v1beta1/{original_stream.name=projects/*/streams/*}\x1a{\xca\x41\x1e\x62igquerystorage.googleapis.com\xd2\x41Whttps://www.googleapis.com/auth/bigquery,https://www.googleapis.com/auth/cloud-platformBy\n)com.google.cloud.bigquery.storage.v1beta1ZLgoogle.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storageb\x06proto3' + 
'\n9google/cloud/bigquery/storage_v1beta1/proto/storage.proto\x12%google.cloud.bigquery.storage.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\x37google/cloud/bigquery/storage_v1beta1/proto/arrow.proto\x1a\x36google/cloud/bigquery/storage_v1beta1/proto/avro.proto\x1a>google/cloud/bigquery/storage_v1beta1/proto/read_options.proto\x1a\x41google/cloud/bigquery/storage_v1beta1/proto/table_reference.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto"|\n\x06Stream\x12\x0c\n\x04name\x18\x01 \x01(\t:d\xea\x41\x61\n%bigquerystorage.googleapis.com/Stream\x12\x38projects/{project}/locations/{location}/streams/{stream}"_\n\x0eStreamPosition\x12=\n\x06stream\x18\x01 \x01(\x0b\x32-.google.cloud.bigquery.storage.v1beta1.Stream\x12\x0e\n\x06offset\x18\x02 \x01(\x03"\x8d\x05\n\x0bReadSession\x12\x0c\n\x04name\x18\x01 \x01(\t\x12/\n\x0b\x65xpire_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12H\n\x0b\x61vro_schema\x18\x05 \x01(\x0b\x32\x31.google.cloud.bigquery.storage.v1beta1.AvroSchemaH\x00\x12J\n\x0c\x61rrow_schema\x18\x06 \x01(\x0b\x32\x32.google.cloud.bigquery.storage.v1beta1.ArrowSchemaH\x00\x12>\n\x07streams\x18\x04 \x03(\x0b\x32-.google.cloud.bigquery.storage.v1beta1.Stream\x12N\n\x0ftable_reference\x18\x07 \x01(\x0b\x32\x35.google.cloud.bigquery.storage.v1beta1.TableReference\x12N\n\x0ftable_modifiers\x18\x08 \x01(\x0b\x32\x35.google.cloud.bigquery.storage.v1beta1.TableModifiers\x12R\n\x11sharding_strategy\x18\t \x01(\x0e\x32\x37.google.cloud.bigquery.storage.v1beta1.ShardingStrategy:k\xea\x41h\n*bigquerystorage.googleapis.com/ReadSession\x12:projects/{project}/locations/{location}/sessions/{session}B\x08\n\x06schema"\xd5\x03\n\x18\x43reateReadSessionRequest\x12S\n\x0ftable_reference\x18\x01 \x01(\x0b\x32\x35.google.cloud.bigquery.storage.v1beta1.TableReferenceB\x03\xe0\x41\x02\x12\x13\n\x06parent\x18\x06 
\x01(\tB\x03\xe0\x41\x02\x12N\n\x0ftable_modifiers\x18\x02 \x01(\x0b\x32\x35.google.cloud.bigquery.storage.v1beta1.TableModifiers\x12\x19\n\x11requested_streams\x18\x03 \x01(\x05\x12M\n\x0cread_options\x18\x04 \x01(\x0b\x32\x37.google.cloud.bigquery.storage.v1beta1.TableReadOptions\x12\x41\n\x06\x66ormat\x18\x05 \x01(\x0e\x32\x31.google.cloud.bigquery.storage.v1beta1.DataFormat\x12R\n\x11sharding_strategy\x18\x07 \x01(\x0e\x32\x37.google.cloud.bigquery.storage.v1beta1.ShardingStrategy"d\n\x0fReadRowsRequest\x12Q\n\rread_position\x18\x01 \x01(\x0b\x32\x35.google.cloud.bigquery.storage.v1beta1.StreamPositionB\x03\xe0\x41\x02"\xa0\x01\n\x0cStreamStatus\x12\x1b\n\x13\x65stimated_row_count\x18\x01 \x01(\x03\x12\x19\n\x11\x66raction_consumed\x18\x02 \x01(\x02\x12\x41\n\x08progress\x18\x04 \x01(\x0b\x32/.google.cloud.bigquery.storage.v1beta1.Progress\x12\x15\n\ris_splittable\x18\x03 \x01(\x08">\n\x08Progress\x12\x19\n\x11\x61t_response_start\x18\x01 \x01(\x02\x12\x17\n\x0f\x61t_response_end\x18\x02 \x01(\x02"*\n\x0eThrottleStatus\x12\x18\n\x10throttle_percent\x18\x01 \x01(\x05"\xdf\x02\n\x10ReadRowsResponse\x12\x44\n\tavro_rows\x18\x03 \x01(\x0b\x32/.google.cloud.bigquery.storage.v1beta1.AvroRowsH\x00\x12U\n\x12\x61rrow_record_batch\x18\x04 \x01(\x0b\x32\x37.google.cloud.bigquery.storage.v1beta1.ArrowRecordBatchH\x00\x12\x11\n\trow_count\x18\x06 \x01(\x03\x12\x43\n\x06status\x18\x02 \x01(\x0b\x32\x33.google.cloud.bigquery.storage.v1beta1.StreamStatus\x12N\n\x0fthrottle_status\x18\x05 \x01(\x0b\x32\x35.google.cloud.bigquery.storage.v1beta1.ThrottleStatusB\x06\n\x04rows"\x90\x01\n$BatchCreateReadSessionStreamsRequest\x12H\n\x07session\x18\x01 \x01(\x0b\x32\x32.google.cloud.bigquery.storage.v1beta1.ReadSessionB\x03\xe0\x41\x02\x12\x1e\n\x11requested_streams\x18\x02 \x01(\x05\x42\x03\xe0\x41\x02"g\n%BatchCreateReadSessionStreamsResponse\x12>\n\x07streams\x18\x01 
\x03(\x0b\x32-.google.cloud.bigquery.storage.v1beta1.Stream"V\n\x15\x46inalizeStreamRequest\x12=\n\x06stream\x18\x02 \x01(\x0b\x32-.google.cloud.bigquery.storage.v1beta1.Stream"r\n\x16SplitReadStreamRequest\x12\x46\n\x0foriginal_stream\x18\x01 \x01(\x0b\x32-.google.cloud.bigquery.storage.v1beta1.Stream\x12\x10\n\x08\x66raction\x18\x02 \x01(\x02"\xa9\x01\n\x17SplitReadStreamResponse\x12\x45\n\x0eprimary_stream\x18\x01 \x01(\x0b\x32-.google.cloud.bigquery.storage.v1beta1.Stream\x12G\n\x10remainder_stream\x18\x02 \x01(\x0b\x32-.google.cloud.bigquery.storage.v1beta1.Stream*>\n\nDataFormat\x12\x1b\n\x17\x44\x41TA_FORMAT_UNSPECIFIED\x10\x00\x12\x08\n\x04\x41VRO\x10\x01\x12\t\n\x05\x41RROW\x10\x03*O\n\x10ShardingStrategy\x12!\n\x1dSHARDING_STRATEGY_UNSPECIFIED\x10\x00\x12\n\n\x06LIQUID\x10\x01\x12\x0c\n\x08\x42\x41LANCED\x10\x02\x32\xeb\n\n\x0f\x42igQueryStorage\x12\xb3\x02\n\x11\x43reateReadSession\x12?.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest\x1a\x32.google.cloud.bigquery.storage.v1beta1.ReadSession"\xa8\x01\x82\xd3\xe4\x93\x02w"0/v1beta1/{table_reference.project_id=projects/*}:\x01*Z@";/v1beta1/{table_reference.dataset_id=projects/*/datasets/*}:\x01*\xda\x41(table_reference,parent,requested_streams\x12\xd0\x01\n\x08ReadRows\x12\x36.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest\x1a\x37.google.cloud.bigquery.storage.v1beta1.ReadRowsResponse"Q\x82\xd3\xe4\x93\x02;\x12\x39/v1beta1/{read_position.stream.name=projects/*/streams/*}\xda\x41\rread_position0\x01\x12\x90\x02\n\x1d\x42\x61tchCreateReadSessionStreams\x12K.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest\x1aL.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse"T\x82\xd3\xe4\x93\x02\x32"-/v1beta1/{session.name=projects/*/sessions/*}:\x01*\xda\x41\x19session,requested_streams\x12\xa7\x01\n\x0e\x46inalizeStream\x12<.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest\x1a\x16.google.protobuf.Empty"?\x82\xd3\xe4\x93\x02\x30"+/v1beta1/{
stream.name=projects/*/streams/*}:\x01*\xda\x41\x06stream\x12\xe0\x01\n\x0fSplitReadStream\x12=.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest\x1a>.google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse"N\x82\xd3\xe4\x93\x02\x36\x12\x34/v1beta1/{original_stream.name=projects/*/streams/*}\xda\x41\x0foriginal_stream\x1a\xae\x01\xca\x41\x1e\x62igquerystorage.googleapis.com\xd2\x41\x89\x01https://www.googleapis.com/auth/bigquery,https://www.googleapis.com/auth/bigquery.readonly,https://www.googleapis.com/auth/cloud-platformBy\n)com.google.cloud.bigquery.storage.v1beta1ZLgoogle.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storageb\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, + google_dot_api_dot_resource__pb2.DESCRIPTOR, google_dot_cloud_dot_bigquery_dot_storage__v1beta1_dot_proto_dot_arrow__pb2.DESCRIPTOR, google_dot_cloud_dot_bigquery_dot_storage__v1beta1_dot_proto_dot_avro__pb2.DESCRIPTOR, google_dot_cloud_dot_bigquery_dot_storage__v1beta1_dot_proto_dot_read__options__pb2.DESCRIPTOR, google_dot_cloud_dot_bigquery_dot_storage__v1beta1_dot_proto_dot_table__reference__pb2.DESCRIPTOR, google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - google_dot_api_dot_client__pb2.DESCRIPTOR, ], ) @@ -78,8 +82,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=2799, - serialized_end=2861, + serialized_start=3227, + serialized_end=3289, ) _sym_db.RegisterEnumDescriptor(_DATAFORMAT) @@ -106,8 +110,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=2863, - serialized_end=2942, + serialized_start=3291, + serialized_end=3370, ) _sym_db.RegisterEnumDescriptor(_SHARDINGSTRATEGY) @@ -149,13 +153,15 @@ extensions=[], nested_types=[], enum_types=[], - serialized_options=None, + serialized_options=_b( + 
"\352Aa\n%bigquerystorage.googleapis.com/Stream\0228projects/{project}/locations/{location}/streams/{stream}" + ), is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=461, - serialized_end=483, + serialized_start=521, + serialized_end=645, ) @@ -211,8 +217,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=485, - serialized_end=580, + serialized_start=647, + serialized_end=742, ) @@ -371,7 +377,9 @@ extensions=[], nested_types=[], enum_types=[], - serialized_options=None, + serialized_options=_b( + "\352Ah\n*bigquerystorage.googleapis.com/ReadSession\022:projects/{project}/locations/{location}/sessions/{session}" + ), is_extendable=False, syntax="proto3", extension_ranges=[], @@ -384,8 +392,8 @@ fields=[], ) ], - serialized_start=583, - serialized_end=1127, + serialized_start=745, + serialized_end=1398, ) @@ -411,7 +419,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -429,7 +437,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -531,8 +539,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1130, - serialized_end=1589, + serialized_start=1401, + serialized_end=1870, ) @@ -558,7 +566,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ) ], @@ -570,8 +578,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1591, - serialized_end=1686, + serialized_start=1872, + serialized_end=1972, ) @@ -618,10 +626,28 @@ serialized_options=None, file=DESCRIPTOR, ), + _descriptor.FieldDescriptor( + name="progress", + full_name="google.cloud.bigquery.storage.v1beta1.StreamStatus.progress", + index=2, + number=4, + 
type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), _descriptor.FieldDescriptor( name="is_splittable", full_name="google.cloud.bigquery.storage.v1beta1.StreamStatus.is_splittable", - index=2, + index=3, number=3, type=8, cpp_type=7, @@ -645,8 +671,65 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1688, - serialized_end=1781, + serialized_start=1975, + serialized_end=2135, +) + + +_PROGRESS = _descriptor.Descriptor( + name="Progress", + full_name="google.cloud.bigquery.storage.v1beta1.Progress", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="at_response_start", + full_name="google.cloud.bigquery.storage.v1beta1.Progress.at_response_start", + index=0, + number=1, + type=2, + cpp_type=6, + label=1, + has_default_value=False, + default_value=float(0), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="at_response_end", + full_name="google.cloud.bigquery.storage.v1beta1.Progress.at_response_end", + index=1, + number=2, + type=2, + cpp_type=6, + label=1, + has_default_value=False, + default_value=float(0), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2137, + serialized_end=2199, ) @@ -684,8 +767,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1783, - serialized_end=1825, + serialized_start=2201, + serialized_end=2243, ) @@ -803,8 +886,8 
@@ fields=[], ) ], - serialized_start=1828, - serialized_end=2179, + serialized_start=2246, + serialized_end=2597, ) @@ -830,7 +913,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -848,7 +931,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -860,8 +943,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2182, - serialized_end=2316, + serialized_start=2600, + serialized_end=2744, ) @@ -899,8 +982,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2318, - serialized_end=2421, + serialized_start=2746, + serialized_end=2849, ) @@ -938,8 +1021,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2423, - serialized_end=2509, + serialized_start=2851, + serialized_end=2937, ) @@ -995,8 +1078,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2511, - serialized_end=2625, + serialized_start=2939, + serialized_end=3053, ) @@ -1052,8 +1135,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2628, - serialized_end=2797, + serialized_start=3056, + serialized_end=3225, ) _STREAMPOSITION.fields_by_name["stream"].message_type = _STREAM @@ -1114,6 +1197,7 @@ "sharding_strategy" ].enum_type = _SHARDINGSTRATEGY _READROWSREQUEST.fields_by_name["read_position"].message_type = _STREAMPOSITION +_STREAMSTATUS.fields_by_name["progress"].message_type = _PROGRESS _READROWSRESPONSE.fields_by_name[ "avro_rows" ].message_type = ( @@ -1152,6 +1236,7 @@ DESCRIPTOR.message_types_by_name["CreateReadSessionRequest"] = _CREATEREADSESSIONREQUEST DESCRIPTOR.message_types_by_name["ReadRowsRequest"] = _READROWSREQUEST DESCRIPTOR.message_types_by_name["StreamStatus"] = _STREAMSTATUS +DESCRIPTOR.message_types_by_name["Progress"] = _PROGRESS 
DESCRIPTOR.message_types_by_name["ThrottleStatus"] = _THROTTLESTATUS DESCRIPTOR.message_types_by_name["ReadRowsResponse"] = _READROWSRESPONSE DESCRIPTOR.message_types_by_name[ @@ -1263,19 +1348,18 @@ indicating the project this ReadSession is associated with. This is the project that will be billed for usage. table_modifiers: - Optional. Any modifiers to the Table (e.g. snapshot - timestamp). + Any modifiers to the Table (e.g. snapshot timestamp). requested_streams: - Optional. Initial number of streams. If unset or 0, we will - provide a value of streams so as to produce reasonable - throughput. Must be non-negative. The number of streams may be - lower than the requested number, depending on the amount - parallelism that is reasonable for the table and the maximum - amount of parallelism allowed by the system. Streams must be - read starting from offset 0. + Initial number of streams. If unset or 0, we will provide a + value of streams so as to produce reasonable throughput. Must + be non-negative. The number of streams may be lower than the + requested number, depending on the amount parallelism that is + reasonable for the table and the maximum amount of parallelism + allowed by the system. Streams must be read starting from + offset 0. read_options: - Optional. Read options for this session (e.g. column - selection, filters). + Read options for this session (e.g. column selection, + filters). format: Data output format. Currently default to Avro. sharding_strategy: @@ -1331,6 +1415,10 @@ progress through the pre-filtering rows. This value is only populated for sessions created through the BALANCED sharding strategy. + progress: + Represents the progress of the current stream. Note: This + value is under development and should not be used. Use + ``fraction_consumed`` instead. is_splittable: Whether this stream can be split. For sessions that use the LIQUID sharding strategy, this value is always false. 
For @@ -1344,6 +1432,36 @@ ) _sym_db.RegisterMessage(StreamStatus) +Progress = _reflection.GeneratedProtocolMessageType( + "Progress", + (_message.Message,), + dict( + DESCRIPTOR=_PROGRESS, + __module__="google.cloud.bigquery.storage_v1beta1.proto.storage_pb2", + __doc__="""Protocol buffer. + + Attributes: + at_response_start: + The fraction of rows assigned to the stream that have been + processed by the server so far, not including the rows in the + current response message. This value, along with + ``at_response_end``, can be used to interpolate the progress + made as the rows in the message are being processed using the + following formula: ``at_response_start + (at_response_end - + at_response_start) * rows_processed_from_response / + rows_in_response``. Note that if a filter is provided, the + ``at_response_end`` value of the previous response may not + necessarily be equal to the ``at_response_start`` value of the + current response. + at_response_end: + Similar to ``at_response_start``, except that this value + includes the rows in the current response. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.bigquery.storage.v1beta1.Progress) + ), +) +_sym_db.RegisterMessage(Progress) + ThrottleStatus = _reflection.GeneratedProtocolMessageType( "ThrottleStatus", (_message.Message,), @@ -1513,6 +1631,15 @@ DESCRIPTOR._options = None +_STREAM._options = None +_READSESSION._options = None +_CREATEREADSESSIONREQUEST.fields_by_name["table_reference"]._options = None +_CREATEREADSESSIONREQUEST.fields_by_name["parent"]._options = None +_READROWSREQUEST.fields_by_name["read_position"]._options = None +_BATCHCREATEREADSESSIONSTREAMSREQUEST.fields_by_name["session"]._options = None +_BATCHCREATEREADSESSIONSTREAMSREQUEST.fields_by_name[ + "requested_streams" +]._options = None _BIGQUERYSTORAGE = _descriptor.ServiceDescriptor( name="BigQueryStorage", @@ -1520,10 +1647,10 @@ file=DESCRIPTOR, index=0, serialized_options=_b( - "\312A\036bigquerystorage.googleapis.com\322AWhttps://www.googleapis.com/auth/bigquery,https://www.googleapis.com/auth/cloud-platform" + "\312A\036bigquerystorage.googleapis.com\322A\211\001https://www.googleapis.com/auth/bigquery,https://www.googleapis.com/auth/bigquery.readonly,https://www.googleapis.com/auth/cloud-platform" ), - serialized_start=2945, - serialized_end=4165, + serialized_start=3373, + serialized_end=4760, methods=[ _descriptor.MethodDescriptor( name="CreateReadSession", @@ -1533,7 +1660,7 @@ input_type=_CREATEREADSESSIONREQUEST, output_type=_READSESSION, serialized_options=_b( - '\202\323\344\223\002w"0/v1beta1/{table_reference.project_id=projects/*}:\001*Z@";/v1beta1/{table_reference.dataset_id=projects/*/datasets/*}:\001*' + '\202\323\344\223\002w"0/v1beta1/{table_reference.project_id=projects/*}:\001*Z@";/v1beta1/{table_reference.dataset_id=projects/*/datasets/*}:\001*\332A(table_reference,parent,requested_streams' ), ), _descriptor.MethodDescriptor( @@ -1544,7 +1671,7 @@ input_type=_READROWSREQUEST, output_type=_READROWSRESPONSE, serialized_options=_b( - 
"\202\323\344\223\002;\0229/v1beta1/{read_position.stream.name=projects/*/streams/*}" + "\202\323\344\223\002;\0229/v1beta1/{read_position.stream.name=projects/*/streams/*}\332A\rread_position" ), ), _descriptor.MethodDescriptor( @@ -1555,7 +1682,7 @@ input_type=_BATCHCREATEREADSESSIONSTREAMSREQUEST, output_type=_BATCHCREATEREADSESSIONSTREAMSRESPONSE, serialized_options=_b( - '\202\323\344\223\0022"-/v1beta1/{session.name=projects/*/sessions/*}:\001*' + '\202\323\344\223\0022"-/v1beta1/{session.name=projects/*/sessions/*}:\001*\332A\031session,requested_streams' ), ), _descriptor.MethodDescriptor( @@ -1566,7 +1693,7 @@ input_type=_FINALIZESTREAMREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, serialized_options=_b( - '\202\323\344\223\0020"+/v1beta1/{stream.name=projects/*/streams/*}:\001*' + '\202\323\344\223\0020"+/v1beta1/{stream.name=projects/*/streams/*}:\001*\332A\006stream' ), ), _descriptor.MethodDescriptor( @@ -1577,7 +1704,7 @@ input_type=_SPLITREADSTREAMREQUEST, output_type=_SPLITREADSTREAMRESPONSE, serialized_options=_b( - "\202\323\344\223\0026\0224/v1beta1/{original_stream.name=projects/*/streams/*}" + "\202\323\344\223\0026\0224/v1beta1/{original_stream.name=projects/*/streams/*}\332A\017original_stream" ), ), ], diff --git a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/table_reference.proto b/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/table_reference.proto index fb74bbf6c642..a55dc48eb023 100644 --- a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/table_reference.proto +++ b/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/table_reference.proto @@ -17,6 +17,7 @@ syntax = "proto3"; package google.cloud.bigquery.storage.v1beta1; +import "google/api/resource.proto"; import "google/protobuf/timestamp.proto"; option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storage"; diff --git 
a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/table_reference_pb2.py b/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/table_reference_pb2.py index 30c85aa2f469..992067f07367 100644 --- a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/table_reference_pb2.py +++ b/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/table_reference_pb2.py @@ -15,6 +15,7 @@ _sym_db = _symbol_database.Default() +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 @@ -26,9 +27,12 @@ "\n)com.google.cloud.bigquery.storage.v1beta1B\023TableReferenceProtoZLgoogle.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storage" ), serialized_pb=_b( - '\nAgoogle/cloud/bigquery/storage_v1beta1/proto/table_reference.proto\x12%google.cloud.bigquery.storage.v1beta1\x1a\x1fgoogle/protobuf/timestamp.proto"J\n\x0eTableReference\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x12\n\ndataset_id\x18\x02 \x01(\t\x12\x10\n\x08table_id\x18\x03 \x01(\t"C\n\x0eTableModifiers\x12\x31\n\rsnapshot_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x8e\x01\n)com.google.cloud.bigquery.storage.v1beta1B\x13TableReferenceProtoZLgoogle.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storageb\x06proto3' + '\nAgoogle/cloud/bigquery/storage_v1beta1/proto/table_reference.proto\x12%google.cloud.bigquery.storage.v1beta1\x1a\x19google/api/resource.proto\x1a\x1fgoogle/protobuf/timestamp.proto"J\n\x0eTableReference\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x12\n\ndataset_id\x18\x02 \x01(\t\x12\x10\n\x08table_id\x18\x03 \x01(\t"C\n\x0eTableModifiers\x12\x31\n\rsnapshot_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x8e\x01\n)com.google.cloud.bigquery.storage.v1beta1B\x13TableReferenceProtoZLgoogle.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storageb\x06proto3' ), - 
dependencies=[google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR], + dependencies=[ + google_dot_api_dot_resource__pb2.DESCRIPTOR, + google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, + ], ) @@ -102,8 +106,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=141, - serialized_end=215, + serialized_start=168, + serialized_end=242, ) @@ -141,8 +145,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=217, - serialized_end=284, + serialized_start=244, + serialized_end=311, ) _TABLEMODIFIERS.fields_by_name[ diff --git a/bigquery_storage/synth.metadata b/bigquery_storage/synth.metadata index 5695e76c7e81..e23a8d443a90 100644 --- a/bigquery_storage/synth.metadata +++ b/bigquery_storage/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-08-21T23:20:45.275738Z", + "updateTime": "2019-09-27T14:19:13.569391Z", "sources": [ { "generator": { "name": "artman", - "version": "0.34.0", - "dockerImage": "googleapis/artman@sha256:38a27ba6245f96c3e86df7acb2ebcc33b4f186d9e475efe2d64303aec3d4e0ea" + "version": "0.37.1", + "dockerImage": "googleapis/artman@sha256:6068f67900a3f0bdece596b97bda8fc70406ca0e137a941f4c81d3217c994a80" } }, { "git": { "name": "googleapis", - "remote": "git@github.com:googleapis/googleapis.git", - "sha": "92bebf78345af8b2d3585220527115bda8bdedf8", - "internalRef": "264715111" + "remote": "https://github.com/googleapis/googleapis.git", + "sha": "cd112d8d255e0099df053643d4bd12c228ef7b1b", + "internalRef": "271468707" } }, { diff --git a/bigquery_storage/synth.py b/bigquery_storage/synth.py index cbbeffd02acf..0866ae8eeb98 100644 --- a/bigquery_storage/synth.py +++ b/bigquery_storage/synth.py @@ -117,6 +117,13 @@ r"google.api_core.grpc_helpers.create_channel\(", "google.api_core.grpc_helpers.create_channel( # pragma: no cover", ) + +# Fix up proto docs that are missing summary line. 
+s.replace( + "google/cloud/bigquery_storage_v1beta1/proto/storage_pb2.py", + '"""Attributes:', + '"""Protocol buffer.\n\n Attributes:', +) # END: Ignore lint and coverage # ---------------------------------------------------------------------------- diff --git a/bigtable/CHANGELOG.md b/bigtable/CHANGELOG.md index 80eaff1617b3..70e61063c488 100644 --- a/bigtable/CHANGELOG.md +++ b/bigtable/CHANGELOG.md @@ -4,6 +4,21 @@ [1]: https://pypi.org/project/google-cloud-bigtable/#history +## 1.1.0 + +10-15-2019 06:40 PDT + + +### New Features +- Add IAM Policy methods to table admin client (via synth). ([#9172](https://github.com/googleapis/google-cloud-python/pull/9172)) + +### Dependencies +- Pin 'google-cloud-core >= 1.0.3, < 2.0.0dev'. ([#9445](https://github.com/googleapis/google-cloud-python/pull/9445)) + +### Documentation +- Fix intersphinx reference to requests ([#9294](https://github.com/googleapis/google-cloud-python/pull/9294)) +- Fix misspelling in docs. ([#9184](https://github.com/googleapis/google-cloud-python/pull/9184)) + ## 1.0.0 08-28-2019 12:49 PDT diff --git a/bigtable/docs/conf.py b/bigtable/docs/conf.py index ebf93a3dbe5c..af2c90faeb46 100644 --- a/bigtable/docs/conf.py +++ b/bigtable/docs/conf.py @@ -342,7 +342,7 @@ "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://2.python-requests.org/en/master/", None), + "requests": ("https://requests.kennethreitz.org/en/stable/", None), } diff --git a/bigtable/docs/data-api.rst b/bigtable/docs/data-api.rst index d35b50079426..b50995be7368 100644 --- a/bigtable/docs/data-api.rst +++ b/bigtable/docs/data-api.rst @@ -84,7 +84,7 @@ Building Up Mutations --------------------- In all three cases, a set of mutations (or two sets) are built up -on a row before they are sent of in a batch via +on a row before they are sent off in a batch via .. 
code:: python diff --git a/bigtable/google/cloud/bigtable/client.py b/bigtable/google/cloud/bigtable/client.py index f9a625b15843..8a8315623cae 100644 --- a/bigtable/google/cloud/bigtable/client.py +++ b/bigtable/google/cloud/bigtable/client.py @@ -60,11 +60,13 @@ """Scope for reading table data.""" -def _create_gapic_client(client_class): +def _create_gapic_client(client_class, client_options=None): def inner(self): if self._emulator_host is None: return client_class( - credentials=self._credentials, client_info=self._client_info + credentials=self._credentials, + client_info=self._client_info, + client_options=client_options, ) else: return client_class( @@ -109,6 +111,17 @@ class Client(ClientWithProject): you only need to set this if you're developing your own library or partner tool. + :type client_options: :class:`~google.api_core.client_options.ClientOptions` + or :class:`dict` + :param client_options: (Optional) Client options used to set user options + on the client. API Endpoint should be set through client_options. + + :type admin_client_options: + :class:`~google.api_core.client_options.ClientOptions` or :class:`dict` + :param admin_client_options: (Optional) Client options used to set user + options on the client. API Endpoint for admin operations should be set + through admin_client_options. + :type channel: :instance: grpc.Channel :param channel (grpc.Channel): (Optional) DEPRECATED: A ``Channel`` instance through which to make calls. 
@@ -130,6 +143,8 @@ def __init__( read_only=False, admin=False, client_info=_CLIENT_INFO, + client_options=None, + admin_client_options=None, channel=None, ): if read_only and admin: @@ -155,6 +170,8 @@ def __init__( stacklevel=2, ) + self._client_options = client_options + self._admin_client_options = admin_client_options self._channel = channel self.SCOPE = self._get_scopes() super(Client, self).__init__(project=project, credentials=credentials) @@ -213,9 +230,10 @@ def table_data_client(self): :returns: A BigtableClient object. """ if self._table_data_client is None: - self._table_data_client = _create_gapic_client(bigtable_v2.BigtableClient)( - self + klass = _create_gapic_client( + bigtable_v2.BigtableClient, client_options=self._client_options ) + self._table_data_client = klass(self) return self._table_data_client @property @@ -237,9 +255,11 @@ def table_admin_client(self): if self._table_admin_client is None: if not self._admin: raise ValueError("Client is not an admin client.") - self._table_admin_client = _create_gapic_client( - bigtable_admin_v2.BigtableTableAdminClient - )(self) + klass = _create_gapic_client( + bigtable_admin_v2.BigtableTableAdminClient, + client_options=self._admin_client_options, + ) + self._table_admin_client = klass(self) return self._table_admin_client @property @@ -261,9 +281,11 @@ def instance_admin_client(self): if self._instance_admin_client is None: if not self._admin: raise ValueError("Client is not an admin client.") - self._instance_admin_client = _create_gapic_client( - bigtable_admin_v2.BigtableInstanceAdminClient - )(self) + klass = _create_gapic_client( + bigtable_admin_v2.BigtableInstanceAdminClient, + client_options=self._admin_client_options, + ) + self._instance_admin_client = klass(self) return self._instance_admin_client def instance(self, instance_id, display_name=None, instance_type=None, labels=None): diff --git a/bigtable/google/cloud/bigtable_admin_v2/__init__.py 
b/bigtable/google/cloud/bigtable_admin_v2/__init__.py index 501d8f24d3e1..021abe2ce82b 100644 --- a/bigtable/google/cloud/bigtable_admin_v2/__init__.py +++ b/bigtable/google/cloud/bigtable_admin_v2/__init__.py @@ -35,4 +35,9 @@ class BigtableTableAdminClient(bigtable_table_admin_client.BigtableTableAdminCli enums = enums -__all__ = ("enums", "types", "BigtableInstanceAdminClient", "BigtableTableAdminClient") +__all__ = ( + "enums", + "types", + "BigtableInstanceAdminClient", + "BigtableTableAdminClient", +) diff --git a/bigtable/google/cloud/bigtable_admin_v2/gapic/bigtable_instance_admin_client.py b/bigtable/google/cloud/bigtable_admin_v2/gapic/bigtable_instance_admin_client.py index fed633c8dc6b..c0bac0768dcf 100644 --- a/bigtable/google/cloud/bigtable_admin_v2/gapic/bigtable_instance_admin_client.py +++ b/bigtable/google/cloud/bigtable_admin_v2/gapic/bigtable_instance_admin_client.py @@ -49,7 +49,9 @@ from google.protobuf import field_mask_pb2 -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-bigtable").version +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( + "google-cloud-bigtable", +).version class BigtableInstanceAdminClient(object): @@ -128,7 +130,7 @@ def location_path(cls, project, location): def project_path(cls, project): """Return a fully-qualified project string.""" return google.api_core.path_template.expand( - "projects/{project}", project=project + "projects/{project}", project=project, ) def __init__( @@ -218,12 +220,12 @@ def __init__( self.transport = transport else: self.transport = bigtable_instance_admin_grpc_transport.BigtableInstanceAdminGrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials + address=api_endpoint, channel=channel, credentials=credentials, ) if client_info is None: client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION + gapic_version=_GAPIC_LIBRARY_VERSION, ) else: client_info.gapic_version = _GAPIC_LIBRARY_VERSION @@ 
-234,7 +236,7 @@ def __init__( # (Ordinarily, these are the defaults specified in the `*_config.py` # file next to this one.) self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME] + client_config["interfaces"][self._INTERFACE_NAME], ) # Save a dictionary of cached API call functions. @@ -333,7 +335,10 @@ def create_instance( ) request = bigtable_instance_admin_pb2.CreateInstanceRequest( - parent=parent, instance_id=instance_id, instance=instance, clusters=clusters + parent=parent, + instance_id=instance_id, + instance=instance, + clusters=clusters, ) if metadata is None: metadata = [] @@ -410,7 +415,7 @@ def get_instance( client_info=self._client_info, ) - request = bigtable_instance_admin_pb2.GetInstanceRequest(name=name) + request = bigtable_instance_admin_pb2.GetInstanceRequest(name=name,) if metadata is None: metadata = [] metadata = list(metadata) @@ -483,7 +488,7 @@ def list_instances( ) request = bigtable_instance_admin_pb2.ListInstancesRequest( - parent=parent, page_token=page_token + parent=parent, page_token=page_token, ) if metadata is None: metadata = [] @@ -586,7 +591,11 @@ def update_instance( ) request = instance_pb2.Instance( - name=name, display_name=display_name, type=type_, labels=labels, state=state + name=name, + display_name=display_name, + type=type_, + labels=labels, + state=state, ) if metadata is None: metadata = [] @@ -679,7 +688,7 @@ def partial_update_instance( ) request = bigtable_instance_admin_pb2.PartialUpdateInstanceRequest( - instance=instance, update_mask=update_mask + instance=instance, update_mask=update_mask, ) if metadata is None: metadata = [] @@ -753,7 +762,7 @@ def delete_instance( client_info=self._client_info, ) - request = bigtable_instance_admin_pb2.DeleteInstanceRequest(name=name) + request = bigtable_instance_admin_pb2.DeleteInstanceRequest(name=name,) if metadata is None: metadata = [] metadata = list(metadata) @@ -849,7 +858,7 @@ def 
create_cluster( ) request = bigtable_instance_admin_pb2.CreateClusterRequest( - parent=parent, cluster_id=cluster_id, cluster=cluster + parent=parent, cluster_id=cluster_id, cluster=cluster, ) if metadata is None: metadata = [] @@ -926,7 +935,7 @@ def get_cluster( client_info=self._client_info, ) - request = bigtable_instance_admin_pb2.GetClusterRequest(name=name) + request = bigtable_instance_admin_pb2.GetClusterRequest(name=name,) if metadata is None: metadata = [] metadata = list(metadata) @@ -1002,7 +1011,7 @@ def list_clusters( ) request = bigtable_instance_admin_pb2.ListClustersRequest( - parent=parent, page_token=page_token + parent=parent, page_token=page_token, ) if metadata is None: metadata = [] @@ -1177,7 +1186,7 @@ def delete_cluster( client_info=self._client_info, ) - request = bigtable_instance_admin_pb2.DeleteClusterRequest(name=name) + request = bigtable_instance_admin_pb2.DeleteClusterRequest(name=name,) if metadata is None: metadata = [] metadata = list(metadata) @@ -1340,7 +1349,7 @@ def get_app_profile( client_info=self._client_info, ) - request = bigtable_instance_admin_pb2.GetAppProfileRequest(name=name) + request = bigtable_instance_admin_pb2.GetAppProfileRequest(name=name,) if metadata is None: metadata = [] metadata = list(metadata) @@ -1432,7 +1441,7 @@ def list_app_profiles( ) request = bigtable_instance_admin_pb2.ListAppProfilesRequest( - parent=parent, page_size=page_size + parent=parent, page_size=page_size, ) if metadata is None: metadata = [] @@ -1620,7 +1629,7 @@ def delete_app_profile( ) request = bigtable_instance_admin_pb2.DeleteAppProfileRequest( - name=name, ignore_warnings=ignore_warnings + name=name, ignore_warnings=ignore_warnings, ) if metadata is None: metadata = [] @@ -1699,7 +1708,7 @@ def get_iam_policy( ) request = iam_policy_pb2.GetIamPolicyRequest( - resource=resource, options=options_ + resource=resource, options=options_, ) if metadata is None: metadata = [] @@ -1782,7 +1791,7 @@ def set_iam_policy( 
client_info=self._client_info, ) - request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy) + request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy,) if metadata is None: metadata = [] metadata = list(metadata) @@ -1861,7 +1870,7 @@ def test_iam_permissions( ) request = iam_policy_pb2.TestIamPermissionsRequest( - resource=resource, permissions=permissions + resource=resource, permissions=permissions, ) if metadata is None: metadata = [] diff --git a/bigtable/google/cloud/bigtable_admin_v2/gapic/bigtable_table_admin_client.py b/bigtable/google/cloud/bigtable_admin_v2/gapic/bigtable_table_admin_client.py index 96026779dbf1..bdc3f1a88749 100644 --- a/bigtable/google/cloud/bigtable_admin_v2/gapic/bigtable_table_admin_client.py +++ b/bigtable/google/cloud/bigtable_admin_v2/gapic/bigtable_table_admin_client.py @@ -54,7 +54,9 @@ from google.protobuf import field_mask_pb2 -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-bigtable").version +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( + "google-cloud-bigtable", +).version class BigtableTableAdminClient(object): @@ -220,12 +222,12 @@ def __init__( self.transport = transport else: self.transport = bigtable_table_admin_grpc_transport.BigtableTableAdminGrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials + address=api_endpoint, channel=channel, credentials=credentials, ) if client_info is None: client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION + gapic_version=_GAPIC_LIBRARY_VERSION, ) else: client_info.gapic_version = _GAPIC_LIBRARY_VERSION @@ -236,7 +238,7 @@ def __init__( # (Ordinarily, these are the defaults specified in the `*_config.py` # file next to this one.) 
self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME] + client_config["interfaces"][self._INTERFACE_NAME], ) # Save a dictionary of cached API call functions. @@ -337,7 +339,10 @@ def create_table( ) request = bigtable_table_admin_pb2.CreateTableRequest( - parent=parent, table_id=table_id, table=table, initial_splits=initial_splits + parent=parent, + table_id=table_id, + table=table, + initial_splits=initial_splits, ) if metadata is None: metadata = [] @@ -439,7 +444,7 @@ def create_table_from_snapshot( ) request = bigtable_table_admin_pb2.CreateTableFromSnapshotRequest( - parent=parent, table_id=table_id, source_snapshot=source_snapshot + parent=parent, table_id=table_id, source_snapshot=source_snapshot, ) if metadata is None: metadata = [] @@ -538,7 +543,7 @@ def list_tables( ) request = bigtable_table_admin_pb2.ListTablesRequest( - parent=parent, view=view, page_size=page_size + parent=parent, view=view, page_size=page_size, ) if metadata is None: metadata = [] @@ -623,7 +628,7 @@ def get_table( client_info=self._client_info, ) - request = bigtable_table_admin_pb2.GetTableRequest(name=name, view=view) + request = bigtable_table_admin_pb2.GetTableRequest(name=name, view=view,) if metadata is None: metadata = [] metadata = list(metadata) @@ -690,7 +695,7 @@ def delete_table( client_info=self._client_info, ) - request = bigtable_table_admin_pb2.DeleteTableRequest(name=name) + request = bigtable_table_admin_pb2.DeleteTableRequest(name=name,) if metadata is None: metadata = [] metadata = list(metadata) @@ -776,7 +781,7 @@ def modify_column_families( ) request = bigtable_table_admin_pb2.ModifyColumnFamiliesRequest( - name=name, modifications=modifications + name=name, modifications=modifications, ) if metadata is None: metadata = [] @@ -939,7 +944,7 @@ def generate_consistency_token( client_info=self._client_info, ) - request = bigtable_table_admin_pb2.GenerateConsistencyTokenRequest(name=name) 
+ request = bigtable_table_admin_pb2.GenerateConsistencyTokenRequest(name=name,) if metadata is None: metadata = [] metadata = list(metadata) @@ -1018,7 +1023,7 @@ def check_consistency( ) request = bigtable_table_admin_pb2.CheckConsistencyRequest( - name=name, consistency_token=consistency_token + name=name, consistency_token=consistency_token, ) if metadata is None: metadata = [] @@ -1097,7 +1102,7 @@ def get_iam_policy( ) request = iam_policy_pb2.GetIamPolicyRequest( - resource=resource, options=options_ + resource=resource, options=options_, ) if metadata is None: metadata = [] @@ -1180,7 +1185,7 @@ def set_iam_policy( client_info=self._client_info, ) - request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy) + request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy,) if metadata is None: metadata = [] metadata = list(metadata) @@ -1259,7 +1264,7 @@ def test_iam_permissions( ) request = iam_policy_pb2.TestIamPermissionsRequest( - resource=resource, permissions=permissions + resource=resource, permissions=permissions, ) if metadata is None: metadata = [] @@ -1462,7 +1467,7 @@ def get_snapshot( client_info=self._client_info, ) - request = bigtable_table_admin_pb2.GetSnapshotRequest(name=name) + request = bigtable_table_admin_pb2.GetSnapshotRequest(name=name,) if metadata is None: metadata = [] metadata = list(metadata) @@ -1563,7 +1568,7 @@ def list_snapshots( ) request = bigtable_table_admin_pb2.ListSnapshotsRequest( - parent=parent, page_size=page_size + parent=parent, page_size=page_size, ) if metadata is None: metadata = [] @@ -1648,7 +1653,7 @@ def delete_snapshot( client_info=self._client_info, ) - request = bigtable_table_admin_pb2.DeleteSnapshotRequest(name=name) + request = bigtable_table_admin_pb2.DeleteSnapshotRequest(name=name,) if metadata is None: metadata = [] metadata = list(metadata) diff --git a/bigtable/google/cloud/bigtable_admin_v2/gapic/transports/bigtable_instance_admin_grpc_transport.py 
b/bigtable/google/cloud/bigtable_admin_v2/gapic/transports/bigtable_instance_admin_grpc_transport.py index afb72e0c8ab9..3482193864b1 100644 --- a/bigtable/google/cloud/bigtable_admin_v2/gapic/transports/bigtable_instance_admin_grpc_transport.py +++ b/bigtable/google/cloud/bigtable_admin_v2/gapic/transports/bigtable_instance_admin_grpc_transport.py @@ -64,7 +64,7 @@ def __init__( # exception (channels come with credentials baked in already). if channel is not None and credentials is not None: raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive." + "The `channel` and `credentials` arguments are mutually " "exclusive.", ) # Create the channel. @@ -85,7 +85,7 @@ def __init__( self._stubs = { "bigtable_instance_admin_stub": bigtable_instance_admin_pb2_grpc.BigtableInstanceAdminStub( channel - ) + ), } # Because this API includes a method that returns a diff --git a/bigtable/google/cloud/bigtable_admin_v2/gapic/transports/bigtable_table_admin_grpc_transport.py b/bigtable/google/cloud/bigtable_admin_v2/gapic/transports/bigtable_table_admin_grpc_transport.py index 5d93e555b3b9..08e70e48b31b 100644 --- a/bigtable/google/cloud/bigtable_admin_v2/gapic/transports/bigtable_table_admin_grpc_transport.py +++ b/bigtable/google/cloud/bigtable_admin_v2/gapic/transports/bigtable_table_admin_grpc_transport.py @@ -64,7 +64,7 @@ def __init__( # exception (channels come with credentials baked in already). if channel is not None and credentials is not None: raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive." + "The `channel` and `credentials` arguments are mutually " "exclusive.", ) # Create the channel. 
@@ -85,7 +85,7 @@ def __init__( self._stubs = { "bigtable_table_admin_stub": bigtable_table_admin_pb2_grpc.BigtableTableAdminStub( channel - ) + ), } # Because this API includes a method that returns a diff --git a/bigtable/google/cloud/bigtable_admin_v2/proto/bigtable_instance_admin_pb2.py b/bigtable/google/cloud/bigtable_admin_v2/proto/bigtable_instance_admin_pb2.py index 01d3fa7e3a4d..5f0601ac2026 100644 --- a/bigtable/google/cloud/bigtable_admin_v2/proto/bigtable_instance_admin_pb2.py +++ b/bigtable/google/cloud/bigtable_admin_v2/proto/bigtable_instance_admin_pb2.py @@ -189,7 +189,7 @@ ), ], extensions=[], - nested_types=[_CREATEINSTANCEREQUEST_CLUSTERSENTRY], + nested_types=[_CREATEINSTANCEREQUEST_CLUSTERSENTRY,], enum_types=[], serialized_options=None, is_extendable=False, @@ -225,7 +225,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -453,7 +453,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -567,7 +567,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -738,7 +738,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -1170,7 +1170,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], diff --git a/bigtable/google/cloud/bigtable_admin_v2/proto/bigtable_table_admin_pb2.py b/bigtable/google/cloud/bigtable_admin_v2/proto/bigtable_table_admin_pb2.py index c81637a34f25..f2a95d546ac3 100644 --- a/bigtable/google/cloud/bigtable_admin_v2/proto/bigtable_table_admin_pb2.py +++ b/bigtable/google/cloud/bigtable_admin_v2/proto/bigtable_table_admin_pb2.py @@ -76,7 +76,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -171,7 +171,7 @@ ), ], extensions=[], - 
nested_types=[_CREATETABLEREQUEST_SPLIT], + nested_types=[_CREATETABLEREQUEST_SPLIT,], enum_types=[], serialized_options=None, is_extendable=False, @@ -334,7 +334,7 @@ index=0, containing_type=None, fields=[], - ) + ), ], serialized_start=660, serialized_end=769, @@ -572,7 +572,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -681,7 +681,7 @@ index=0, containing_type=None, fields=[], - ) + ), ], serialized_start=1254, serialized_end=1419, @@ -732,7 +732,7 @@ ), ], extensions=[], - nested_types=[_MODIFYCOLUMNFAMILIESREQUEST_MODIFICATION], + nested_types=[_MODIFYCOLUMNFAMILIESREQUEST_MODIFICATION,], enum_types=[], serialized_options=None, is_extendable=False, @@ -768,7 +768,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -807,7 +807,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -903,7 +903,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -1053,7 +1053,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -1224,7 +1224,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], diff --git a/bigtable/google/cloud/bigtable_admin_v2/proto/instance_pb2.py b/bigtable/google/cloud/bigtable_admin_v2/proto/instance_pb2.py index 49164dfe6693..ef3a7ce7858b 100644 --- a/bigtable/google/cloud/bigtable_admin_v2/proto/instance_pb2.py +++ b/bigtable/google/cloud/bigtable_admin_v2/proto/instance_pb2.py @@ -281,8 +281,8 @@ ), ], extensions=[], - nested_types=[_INSTANCE_LABELSENTRY], - enum_types=[_INSTANCE_STATE, _INSTANCE_TYPE], + nested_types=[_INSTANCE_LABELSENTRY,], + enum_types=[_INSTANCE_STATE, _INSTANCE_TYPE,], serialized_options=None, is_extendable=False, syntax="proto3", @@ -393,7 +393,7 @@ ], 
extensions=[], nested_types=[], - enum_types=[_CLUSTER_STATE], + enum_types=[_CLUSTER_STATE,], serialized_options=None, is_extendable=False, syntax="proto3", @@ -594,7 +594,7 @@ index=0, containing_type=None, fields=[], - ) + ), ], serialized_start=826, serialized_end=1212, diff --git a/bigtable/google/cloud/bigtable_admin_v2/proto/table_pb2.py b/bigtable/google/cloud/bigtable_admin_v2/proto/table_pb2.py index e15dd2ba5b3f..c348fe4a280f 100644 --- a/bigtable/google/cloud/bigtable_admin_v2/proto/table_pb2.py +++ b/bigtable/google/cloud/bigtable_admin_v2/proto/table_pb2.py @@ -192,11 +192,11 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], - enum_types=[_TABLE_CLUSTERSTATE_REPLICATIONSTATE], + enum_types=[_TABLE_CLUSTERSTATE_REPLICATIONSTATE,], serialized_options=None, is_extendable=False, syntax="proto3", @@ -404,7 +404,7 @@ _TABLE_CLUSTERSTATESENTRY, _TABLE_COLUMNFAMILIESENTRY, ], - enum_types=[_TABLE_TIMESTAMPGRANULARITY, _TABLE_VIEW], + enum_types=[_TABLE_TIMESTAMPGRANULARITY, _TABLE_VIEW,], serialized_options=None, is_extendable=False, syntax="proto3", @@ -439,7 +439,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -478,7 +478,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -516,7 +516,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -611,7 +611,7 @@ ), ], extensions=[], - nested_types=[_GCRULE_INTERSECTION, _GCRULE_UNION], + nested_types=[_GCRULE_INTERSECTION, _GCRULE_UNION,], enum_types=[], serialized_options=None, is_extendable=False, @@ -624,7 +624,7 @@ index=0, containing_type=None, fields=[], - ) + ), ], serialized_start=1087, serialized_end=1428, @@ -767,7 +767,7 @@ ], extensions=[], nested_types=[], - enum_types=[_SNAPSHOT_STATE], + enum_types=[_SNAPSHOT_STATE,], serialized_options=None, 
is_extendable=False, syntax="proto3", diff --git a/bigtable/google/cloud/bigtable_v2/__init__.py b/bigtable/google/cloud/bigtable_v2/__init__.py index ca18668ce49b..216ef8fb1daa 100644 --- a/bigtable/google/cloud/bigtable_v2/__init__.py +++ b/bigtable/google/cloud/bigtable_v2/__init__.py @@ -25,4 +25,7 @@ class BigtableClient(bigtable_client.BigtableClient): __doc__ = bigtable_client.BigtableClient.__doc__ -__all__ = ("types", "BigtableClient") +__all__ = ( + "types", + "BigtableClient", +) diff --git a/bigtable/google/cloud/bigtable_v2/gapic/bigtable_client.py b/bigtable/google/cloud/bigtable_v2/gapic/bigtable_client.py index 36021068dfd8..b13faac448c1 100644 --- a/bigtable/google/cloud/bigtable_v2/gapic/bigtable_client.py +++ b/bigtable/google/cloud/bigtable_v2/gapic/bigtable_client.py @@ -36,7 +36,9 @@ from google.cloud.bigtable_v2.proto import data_pb2 -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-bigtable").version +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( + "google-cloud-bigtable", +).version class BigtableClient(object): @@ -166,12 +168,12 @@ def __init__( self.transport = transport else: self.transport = bigtable_grpc_transport.BigtableGrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials + address=api_endpoint, channel=channel, credentials=credentials, ) if client_info is None: client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION + gapic_version=_GAPIC_LIBRARY_VERSION, ) else: client_info.gapic_version = _GAPIC_LIBRARY_VERSION @@ -182,7 +184,7 @@ def __init__( # (Ordinarily, these are the defaults specified in the `*_config.py` # file next to this one.) self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME] + client_config["interfaces"][self._INTERFACE_NAME], ) # Save a dictionary of cached API call functions. 
@@ -352,7 +354,7 @@ def sample_row_keys( ) request = bigtable_pb2.SampleRowKeysRequest( - table_name=table_name, app_profile_id=app_profile_id + table_name=table_name, app_profile_id=app_profile_id, ) if metadata is None: metadata = [] @@ -537,7 +539,7 @@ def mutate_rows( ) request = bigtable_pb2.MutateRowsRequest( - table_name=table_name, entries=entries, app_profile_id=app_profile_id + table_name=table_name, entries=entries, app_profile_id=app_profile_id, ) if metadata is None: metadata = [] diff --git a/bigtable/google/cloud/bigtable_v2/gapic/transports/bigtable_grpc_transport.py b/bigtable/google/cloud/bigtable_v2/gapic/transports/bigtable_grpc_transport.py index 4c34d5fb1b39..3c30df704a57 100644 --- a/bigtable/google/cloud/bigtable_v2/gapic/transports/bigtable_grpc_transport.py +++ b/bigtable/google/cloud/bigtable_v2/gapic/transports/bigtable_grpc_transport.py @@ -60,7 +60,7 @@ def __init__( # exception (channels come with credentials baked in already). if channel is not None and credentials is not None: raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive." + "The `channel` and `credentials` arguments are mutually " "exclusive.", ) # Create the channel. @@ -78,7 +78,9 @@ def __init__( # gRPC uses objects called "stubs" that are bound to the # channel and provide a basic method for each RPC. 
- self._stubs = {"bigtable_stub": bigtable_pb2_grpc.BigtableStub(channel)} + self._stubs = { + "bigtable_stub": bigtable_pb2_grpc.BigtableStub(channel), + } @classmethod def create_channel( diff --git a/bigtable/google/cloud/bigtable_v2/proto/bigtable_pb2.py b/bigtable/google/cloud/bigtable_v2/proto/bigtable_pb2.py index 1c2b0f1ae134..4e4ab84e1cc8 100644 --- a/bigtable/google/cloud/bigtable_v2/proto/bigtable_pb2.py +++ b/bigtable/google/cloud/bigtable_v2/proto/bigtable_pb2.py @@ -337,7 +337,7 @@ index=0, containing_type=None, fields=[], - ) + ), ], serialized_start=488, serialized_end=749, @@ -388,7 +388,7 @@ ), ], extensions=[], - nested_types=[_READROWSRESPONSE_CELLCHUNK], + nested_types=[_READROWSRESPONSE_CELLCHUNK,], enum_types=[], serialized_options=None, is_extendable=False, @@ -746,7 +746,7 @@ ), ], extensions=[], - nested_types=[_MUTATEROWSREQUEST_ENTRY], + nested_types=[_MUTATEROWSREQUEST_ENTRY,], enum_types=[], serialized_options=None, is_extendable=False, @@ -838,10 +838,10 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], - nested_types=[_MUTATEROWSRESPONSE_ENTRY], + nested_types=[_MUTATEROWSRESPONSE_ENTRY,], enum_types=[], serialized_options=None, is_extendable=False, @@ -1006,7 +1006,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -1138,7 +1138,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], diff --git a/bigtable/google/cloud/bigtable_v2/proto/data_pb2.py b/bigtable/google/cloud/bigtable_v2/proto/data_pb2.py index 8e5cff816455..825a0fa9222f 100644 --- a/bigtable/google/cloud/bigtable_v2/proto/data_pb2.py +++ b/bigtable/google/cloud/bigtable_v2/proto/data_pb2.py @@ -754,7 +754,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -792,7 +792,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), 
], extensions=[], nested_types=[], @@ -1231,7 +1231,7 @@ ), ], extensions=[], - nested_types=[_ROWFILTER_CHAIN, _ROWFILTER_INTERLEAVE, _ROWFILTER_CONDITION], + nested_types=[_ROWFILTER_CHAIN, _ROWFILTER_INTERLEAVE, _ROWFILTER_CONDITION,], enum_types=[], serialized_options=None, is_extendable=False, @@ -1244,7 +1244,7 @@ index=0, containing_type=None, fields=[], - ) + ), ], serialized_start=991, serialized_end=2110, @@ -1441,7 +1441,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -1573,7 +1573,7 @@ index=0, containing_type=None, fields=[], - ) + ), ], serialized_start=2113, serialized_end=2698, @@ -1674,7 +1674,7 @@ index=0, containing_type=None, fields=[], - ) + ), ], serialized_start=2701, serialized_end=2829, diff --git a/bigtable/google/cloud/bigtable_v2/types.py b/bigtable/google/cloud/bigtable_v2/types.py index 53937c1d1687..a445eae1cade 100644 --- a/bigtable/google/cloud/bigtable_v2/types.py +++ b/bigtable/google/cloud/bigtable_v2/types.py @@ -27,9 +27,16 @@ from google.rpc import status_pb2 -_shared_modules = [any_pb2, wrappers_pb2, status_pb2] - -_local_modules = [bigtable_pb2, data_pb2] +_shared_modules = [ + any_pb2, + wrappers_pb2, + status_pb2, +] + +_local_modules = [ + bigtable_pb2, + data_pb2, +] names = [] diff --git a/bigtable/setup.py b/bigtable/setup.py index 26956b393471..82c3aa499dcd 100644 --- a/bigtable/setup.py +++ b/bigtable/setup.py @@ -22,15 +22,15 @@ name = 'google-cloud-bigtable' description = 'Google Cloud Bigtable API client library' -version = '1.0.0' +version = '1.1.0' # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' # 'Development Status :: 5 - Production/Stable' release_status = 'Development Status :: 5 - Production/Stable' dependencies = [ - 'google-api-core[grpc] >= 1.14.0, < 2.0.0dev', - "google-cloud-core >= 1.0.0, < 2.0dev", + "google-api-core[grpc] >= 1.14.0, < 2.0.0dev", + "google-cloud-core >= 1.0.3, < 2.0dev", 
"grpc-google-iam-v1 >= 0.12.3, < 0.13dev", ] extras = { diff --git a/bigtable/synth.metadata b/bigtable/synth.metadata index a7291727fa94..25c44a96331d 100644 --- a/bigtable/synth.metadata +++ b/bigtable/synth.metadata @@ -1,26 +1,26 @@ { - "updateTime": "2019-09-04T12:14:03.458374Z", + "updateTime": "2019-10-29T12:15:54.915199Z", "sources": [ { "generator": { "name": "artman", - "version": "0.36.2", - "dockerImage": "googleapis/artman@sha256:0e6f3a668cd68afc768ecbe08817cf6e56a0e64fcbdb1c58c3b97492d12418a1" + "version": "0.40.3", + "dockerImage": "googleapis/artman@sha256:c805f50525f5f557886c94ab76f56eaa09cb1da58c3ee95111fd34259376621a" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "a2158681f6e30c5fd9446eb1fd7b5021a6d48bfa", - "internalRef": "266999433" + "sha": "532773acbed8d09451dafb3d403ab1823e6a6e1e", + "internalRef": "277177415" } }, { "template": { "name": "python_library", "origin": "synthtool.gcp", - "version": "2019.5.2" + "version": "2019.10.17" } } ], diff --git a/bigtable/tests/system.py b/bigtable/tests/system.py index 28d95d985ffe..ae43bb10ecdf 100644 --- a/bigtable/tests/system.py +++ b/bigtable/tests/system.py @@ -1076,8 +1076,8 @@ def test_read_with_label_applied(self): # Make sure COLUMN_FAMILY_ID1 was the only key. self.assertEqual(len(cells_returned), 0) - cell1_new, = col_fam1.pop(COL_NAME1) - cell3_new, = col_fam1.pop(COL_NAME2) + (cell1_new,) = col_fam1.pop(COL_NAME1) + (cell3_new,) = col_fam1.pop(COL_NAME2) # Make sure COL_NAME1 and COL_NAME2 were the only keys. 
self.assertEqual(len(col_fam1), 0) diff --git a/bigtable/tests/unit/test_client.py b/bigtable/tests/unit/test_client.py index 05a017d898af..8a2ef3c64b56 100644 --- a/bigtable/tests/unit/test_client.py +++ b/bigtable/tests/unit/test_client.py @@ -21,12 +21,12 @@ class Test__create_gapic_client(unittest.TestCase): - def _invoke_client_factory(self, client_class): + def _invoke_client_factory(self, client_class, **kw): from google.cloud.bigtable.client import _create_gapic_client - return _create_gapic_client(client_class) + return _create_gapic_client(client_class, **kw) - def test_without_emulator(self): + def test_wo_emulator(self): client_class = mock.Mock() credentials = _make_credentials() client = _Client(credentials) @@ -36,10 +36,30 @@ def test_without_emulator(self): self.assertIs(result, client_class.return_value) client_class.assert_called_once_with( - credentials=client._credentials, client_info=client_info + credentials=client._credentials, + client_info=client_info, + client_options=None, ) - def test_with_emulator(self): + def test_wo_emulator_w_client_options(self): + client_class = mock.Mock() + credentials = _make_credentials() + client = _Client(credentials) + client_info = client._client_info = mock.Mock() + client_options = mock.Mock() + + result = self._invoke_client_factory( + client_class, client_options=client_options + )(client) + + self.assertIs(result, client_class.return_value) + client_class.assert_called_once_with( + credentials=client._credentials, + client_info=client_info, + client_options=client_options, + ) + + def test_w_emulator(self): client_class = mock.Mock() emulator_host = emulator_channel = object() credentials = _make_credentials() @@ -210,6 +230,25 @@ def test_table_data_client_not_initialized_w_client_info(self): self.assertIs(table_data_client._client_info, client_info) self.assertIs(client._table_data_client, table_data_client) + def test_table_data_client_not_initialized_w_client_options(self): + credentials = 
_make_credentials() + client_options = mock.Mock() + client = self._make_one( + project=self.PROJECT, credentials=credentials, client_options=client_options + ) + + patch = mock.patch("google.cloud.bigtable_v2.BigtableClient") + with patch as mocked: + table_data_client = client.table_data_client + + self.assertIs(table_data_client, mocked.return_value) + self.assertIs(client._table_data_client, table_data_client) + mocked.assert_called_once_with( + client_info=client._client_info, + credentials=mock.ANY, # added scopes + client_options=client_options, + ) + def test_table_data_client_initialized(self): credentials = _make_credentials() client = self._make_one( @@ -257,6 +296,28 @@ def test_table_admin_client_not_initialized_w_client_info(self): self.assertIs(table_admin_client._client_info, client_info) self.assertIs(client._table_admin_client, table_admin_client) + def test_table_admin_client_not_initialized_w_client_options(self): + credentials = _make_credentials() + admin_client_options = mock.Mock() + client = self._make_one( + project=self.PROJECT, + credentials=credentials, + admin=True, + admin_client_options=admin_client_options, + ) + + patch = mock.patch("google.cloud.bigtable_admin_v2.BigtableTableAdminClient") + with patch as mocked: + table_admin_client = client.table_admin_client + + self.assertIs(table_admin_client, mocked.return_value) + self.assertIs(client._table_admin_client, table_admin_client) + mocked.assert_called_once_with( + client_info=client._client_info, + credentials=mock.ANY, # added scopes + client_options=admin_client_options, + ) + def test_table_admin_client_initialized(self): credentials = _make_credentials() client = self._make_one( @@ -287,7 +348,7 @@ def test_instance_admin_client_not_initialized_w_admin_flag(self): self.assertIs(instance_admin_client._client_info, _CLIENT_INFO) self.assertIs(client._instance_admin_client, instance_admin_client) - def test_instance_admin_client_not_initialized_w_admin_and_client_info(self): + 
def test_instance_admin_client_not_initialized_w_client_info(self): from google.cloud.bigtable_admin_v2 import BigtableInstanceAdminClient credentials = _make_credentials() @@ -304,6 +365,28 @@ def test_instance_admin_client_not_initialized_w_admin_and_client_info(self): self.assertIs(instance_admin_client._client_info, client_info) self.assertIs(client._instance_admin_client, instance_admin_client) + def test_instance_admin_client_not_initialized_w_client_options(self): + credentials = _make_credentials() + admin_client_options = mock.Mock() + client = self._make_one( + project=self.PROJECT, + credentials=credentials, + admin=True, + admin_client_options=admin_client_options, + ) + + patch = mock.patch("google.cloud.bigtable_admin_v2.BigtableInstanceAdminClient") + with patch as mocked: + instance_admin_client = client.instance_admin_client + + self.assertIs(instance_admin_client, mocked.return_value) + self.assertIs(client._instance_admin_client, instance_admin_client) + mocked.assert_called_once_with( + client_info=client._client_info, + credentials=mock.ANY, # added scopes + client_options=admin_client_options, + ) + def test_instance_admin_client_initialized(self): credentials = _make_credentials() client = self._make_one( diff --git a/cloudbuild/.coveragerc b/cloudbuild/.coveragerc new file mode 100644 index 000000000000..b178b094aa1d --- /dev/null +++ b/cloudbuild/.coveragerc @@ -0,0 +1,19 @@ +# Generated by synthtool. DO NOT EDIT! +[run] +branch = True + +[report] +fail_under = 100 +show_missing = True +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ + # Ignore abstract methods + raise NotImplementedError +omit = + */gapic/*.py + */proto/*.py + */core/*.py + */site-packages/*.py \ No newline at end of file diff --git a/cloudbuild/.flake8 b/cloudbuild/.flake8 new file mode 100644 index 000000000000..0268ecc9c55c --- /dev/null +++ b/cloudbuild/.flake8 @@ -0,0 +1,14 @@ +# Generated by synthtool. 
DO NOT EDIT! +[flake8] +ignore = E203, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + *_pb2.py + + # Standard linting exemptions. + __pycache__, + .git, + *.pyc, + conf.py diff --git a/cloudbuild/.repo-metadata.json b/cloudbuild/.repo-metadata.json new file mode 100644 index 000000000000..2fa277bbebf2 --- /dev/null +++ b/cloudbuild/.repo-metadata.json @@ -0,0 +1,13 @@ +{ + "name": "cloudbuild", + "name_pretty": "Cloud Build", + "product_documentation": "https://cloud.google.com/cloud-build/docs/", + "client_documentation": "https://googleapis.dev/python/cloudbuild/latest", + "issue_tracker": "https://issuetracker.google.com/savedsearches/5226584", + "release_level": "alpha", + "language": "python", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-cloud-build", + "api_id": "cloudbuild.googleapis.com", + "requires_billing": false +} \ No newline at end of file diff --git a/cloudbuild/CHANGELOG.md b/cloudbuild/CHANGELOG.md new file mode 100644 index 000000000000..ad158194c5b6 --- /dev/null +++ b/cloudbuild/CHANGELOG.md @@ -0,0 +1,14 @@ +# Changelog + +[PyPI History][1] + +[1]: https://pypi.org/project/google-cloud-build/#history + +## 0.1.0 + +11-07-2019 10:48 PST + +**Note**: This library is incompatible with `google-cloud-containeranalysis<0.3.1`. Please upgrade to `google-cloud-containeranalysis>=0.3.1` to use this library. + +### New Features +- Initial generation of Cloud Build v1 ([#9510](https://github.com/googleapis/google-cloud-python/pull/9510)). diff --git a/cloudbuild/LICENSE b/cloudbuild/LICENSE new file mode 100644 index 000000000000..a8ee855de2aa --- /dev/null +++ b/cloudbuild/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + https://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/cloudbuild/MANIFEST.in b/cloudbuild/MANIFEST.in new file mode 100644 index 000000000000..9cbf175afe6b --- /dev/null +++ b/cloudbuild/MANIFEST.in @@ -0,0 +1,5 @@ +include README.rst LICENSE +recursive-include google *.json *.proto +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ diff --git a/cloudbuild/README.rst b/cloudbuild/README.rst new file mode 100644 index 000000000000..da8f93730a63 --- /dev/null +++ b/cloudbuild/README.rst @@ -0,0 +1,75 @@ +Python Client for Cloud Build API (`Alpha`_) +============================================ + +`Cloud Build API`_: Creates and manages builds on Google Cloud Platform. + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. _Alpha: https://github.com/googleapis/google-cloud-python/blob/master/README.rst +.. 
_Cloud Build API: https://cloud.google.com/cloud-build +.. _Client Library Documentation: https://googleapis.github.io/google-cloud-python/latest/cloudbuild/usage.html +.. _Product Documentation: https://cloud.google.com/cloud-build + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the Cloud Build API.`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the Cloud Build API.: https://cloud.google.com/cloud-build +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + pip install virtualenv + virtualenv + source /bin/activate + /bin/pip install google-cloud-build + + +Windows +^^^^^^^ + +.. code-block:: console + + pip install virtualenv + virtualenv + \Scripts\activate + \Scripts\pip.exe install google-cloud-build + +Next Steps +~~~~~~~~~~ + +- Read the `Client Library Documentation`_ for Cloud Build API + API to see other available methods on the client. +- Read the `Cloud Build API Product documentation`_ to learn + more about the product and see How-to Guides. 
+- View this `repository’s main README`_ to see the full list of Cloud + APIs that we cover. + +.. _Cloud Build API Product documentation: https://cloud.google.com/cloud-build +.. _repository’s main README: https://github.com/googleapis/google-cloud-python/blob/master/README.rst \ No newline at end of file diff --git a/cloudbuild/docs/README.rst b/cloudbuild/docs/README.rst new file mode 120000 index 000000000000..89a0106941ff --- /dev/null +++ b/cloudbuild/docs/README.rst @@ -0,0 +1 @@ +../README.rst \ No newline at end of file diff --git a/cloudbuild/docs/changelog.md b/cloudbuild/docs/changelog.md new file mode 120000 index 000000000000..04c99a55caae --- /dev/null +++ b/cloudbuild/docs/changelog.md @@ -0,0 +1 @@ +../CHANGELOG.md \ No newline at end of file diff --git a/docs/conf.py b/cloudbuild/docs/conf.py similarity index 51% rename from docs/conf.py rename to cloudbuild/docs/conf.py index 9d1e92760a54..6afa33d99f41 100644 --- a/docs/conf.py +++ b/cloudbuild/docs/conf.py @@ -1,21 +1,9 @@ -# Copyright 2016 Google LLC +# -*- coding: utf-8 -*- # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at +# google-cloud-build documentation build configuration file # -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# google-cloud documentation build configuration file, created by -# sphinx-quickstart on Tue Jan 21 22:24:47 2014. -# -# This file is execfile()d with the current directory set to its containing dir. +# This file is execfile()d with the current directory set to its +# containing dir. 
# # Note that not all possible configuration values are present in this # autogenerated file. @@ -23,331 +11,347 @@ # All configuration values have a default; values that are commented out # serve to show the default. -import email +import sys import os -import pkg_resources -import shutil - -from sphinx.util import logging - -logger = logging.getLogger(__name__) +import shlex # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. -# sys.path.insert(0, os.path.abspath('..')) +sys.path.insert(0, os.path.abspath("..")) -# -- General configuration ----------------------------------------------------- +__version__ = "0.1.0" + +# -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = '1.6.3' +needs_sphinx = "1.6.3" -# Add any Sphinx extension module names here, as strings. They can be extensions -# coming with Sphinx (named 'sphinx.ext.*') or your custom ones. +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. extensions = [ - 'sphinx.ext.autodoc', - 'sphinx.ext.autosummary', - 'sphinx.ext.doctest', - 'sphinx.ext.intersphinx', - 'sphinx.ext.napoleon', - 'sphinx.ext.todo', - 'sphinx.ext.viewcode', + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", ] +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_flags = ["members"] +autosummary_generate = True + + # Add any paths that contain templates here, relative to this directory. 
-templates_path = ['_templates'] +templates_path = ["_templates"] # Allow markdown includes (so releases.md can include CHANGLEOG.md) # http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = { - '.md': 'recommonmark.parser.CommonMarkParser', -} +source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} -# The suffix of source filenames. -source_suffix = ['.rst', '.md'] +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] # The encoding of source files. -#source_encoding = 'utf-8-sig' +# source_encoding = 'utf-8-sig' # The master toctree document. -master_doc = 'index' +master_doc = "index" # General information about the project. -project = u'google-cloud' -copyright = u'2014-2017, Google' +project = u"google-cloud-build" +copyright = u"2017, Google" +author = u"Google APIs" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # +# The full version, including alpha/beta/rc tags. +release = __version__ # The short X.Y version. -distro = pkg_resources.get_distribution('google-cloud-core') -release = os.getenv('SPHINX_RELEASE', distro.version) +version = ".".join(release.split(".")[0:2]) # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. -#language = None +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: -#today = '' +# today = '' # Else, today_fmt is used as the format for a strftime call. 
-#today_fmt = '%B %d, %Y' +# today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. -exclude_patterns = ['_build'] +exclude_patterns = ["_build"] -# The reST default role (used for this markup: `text`) to use for all documents. -#default_role = None +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. -#add_function_parentheses = True +# add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). -#add_module_names = True +# add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. -#show_authors = False +# show_authors = False # The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' +pygments_style = "sphinx" # A list of ignored prefixes for module index sorting. -#modindex_common_prefix = [] +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True -# -- Options for HTML output --------------------------------------------------- +# -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. -html_theme = 'alabaster' +html_theme = "alabaster" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. 
html_theme_options = { - 'description': 'Google Cloud Client Libraries for Python', - 'github_user': 'GoogleCloudPlatform', - 'github_repo': 'google-cloud-python', - 'github_banner': True, - 'font_family': "'Roboto', Georgia, sans", - 'head_font_family': "'Roboto', Georgia, serif", - 'code_font_family': "'Roboto Mono', 'Consolas', monospace", + "description": "Google Cloud Client Libraries for Python", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", } # Add any paths that contain custom themes here, relative to this directory. -#html_theme_path = [] +# html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". -#html_title = None +# html_title = None # A shorter title for the navigation bar. Default is the same as html_title. -#html_short_title = None +# html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. -#html_logo = None +# html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. -html_favicon = '_static/images/favicon.ico' +# html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] +# html_static_path = [] -html_add_permalinks = '#' +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. 
+# html_extra_path = [] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. -#html_last_updated_fmt = '%b %d, %Y' +# html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. -#html_use_smartypants = True +# html_use_smartypants = True # Custom sidebar templates, maps document names to template names. -html_sidebars = { - '**': [ - 'about.html', - 'navigation.html', - 'relations.html', - 'searchbox.html', - ] -} +# html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. -#html_additional_pages = {} +# html_additional_pages = {} # If false, no module index is generated. -#html_domain_indices = True +# html_domain_indices = True # If false, no index is generated. -#html_use_index = True +# html_use_index = True # If true, the index is split into individual pages for each letter. -#html_split_index = False +# html_split_index = False # If true, links to the reST sources are added to the pages. -#html_show_sourcelink = True +# html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -#html_show_sphinx = True +# html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -#html_show_copyright = True +# html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. -#html_use_opensearch = '' +# html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = None +# html_file_suffix = None -# Output file base name for HTML help builder. -htmlhelp_basename = 'google-cloud-doc' +# Language to be used for generating the HTML full-text search index. 
+# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' -html_context = {} +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' -# -- Options for LaTeX output -------------------------------------------------- +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-build-doc" -latex_elements = { -# The paper size ('letterpaper' or 'a4paper'). -#'papersize': 'letterpaper', +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] -# The font size ('10pt', '11pt' or '12pt'). -#'pointsize': '10pt', +# -- Options for LaTeX output --------------------------------------------- -# Additional stuff for the LaTeX preamble. -#'preamble': '', +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', } -metadata = distro.get_metadata(distro.PKG_INFO) -author = email.message_from_string(metadata).get('Author') # Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, author, documentclass [howto/manual]). 
+# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). latex_documents = [ - ('index', 'google-cloud.tex', u'google-cloud Documentation', - author, 'manual'), + ( + master_doc, + "google-cloud-build.tex", + u"google-cloud-build Documentation", + author, + "manual", + ) ] # The name of an image file (relative to this directory) to place at the top of # the title page. -#latex_logo = None +# latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. -#latex_use_parts = False +# latex_use_parts = False # If true, show page references after internal links. -#latex_show_pagerefs = False +# latex_show_pagerefs = False # If true, show URL addresses after external links. -#latex_show_urls = False +# latex_show_urls = False # Documents to append as an appendix to all manuals. -#latex_appendices = [] +# latex_appendices = [] # If false, no module index is generated. -#latex_domain_indices = True - - -# -- Options for warnings ------------------------------------------------------ +# latex_domain_indices = True -suppress_warnings = [ - # Temporarily suppress ths to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - 'ref.python' -] - - -# -- Options for manual page output -------------------------------------------- +# -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ - ('index', 'google-cloud', u'google-cloud Documentation', - [author], 1) + (master_doc, "google-cloud-build", u"google-cloud-build Documentation", [author], 1) ] # If true, show URL addresses after external links. 
-#man_show_urls = False +# man_show_urls = False -# -- Options for Texinfo output ------------------------------------------------ +# -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ - ('index', 'google-cloud', u'google-cloud Documentation', - author, 'google-cloud', 'Python API for Google Cloud.', - 'Miscellaneous'), + ( + master_doc, + "google-cloud-build", + u"google-cloud-build Documentation", + author, + "google-cloud-build", + "GAPIC library for the {metadata.shortName} v1 service", + "APIs", + ) ] # Documents to append as an appendix to all manuals. -#texinfo_appendices = [] +# texinfo_appendices = [] # If false, no module index is generated. -#texinfo_domain_indices = True +# texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. -#texinfo_show_urls = 'footnote' +# texinfo_show_urls = 'footnote' -# This pulls class descriptions from the class docstring, -# and parameter definitions from the __init__ docstring. -autoclass_content = 'both' +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False -# Automatically generate API reference stubs from autosummary. -# http://www.sphinx-doc.org/en/master/ext/autosummary.html#generating-stub-pages-automatically -autosummary_generate = True -# Configuration for intersphinx: +# Example configuration for intersphinx: refer to the Python standard library. 
intersphinx_mapping = { - 'fastavro': ('https://fastavro.readthedocs.io/en/stable/', None), - 'google-auth': ('https://google-auth.readthedocs.io/en/stable', None), - 'google-gax': ('https://gax-python.readthedocs.io/en/latest/', None), - 'grpc': ('https://grpc.io/grpc/python/', None), - 'pandas': ('https://pandas.pydata.org/pandas-docs/stable/', None), - 'python': ('https://docs.python.org/3', None), - 'requests': ('https://2.python-requests.org/en/master/', None), + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("https://requests.kennethreitz.org/en/master/", None), + "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), + "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } -# Static HTML pages, e.g. 
to support redirects -# See: https://tech.signavio.com/2017/managing-sphinx-redirects -# HTML pages to be copied from source to target -static_html_pages = [ - 'datastore/usage.html', - 'dns/usage.html', - 'bigquery/usage.html', - 'bigquery/generated/google.cloud.bigquery.magics.html', - 'runtimeconfig/usage.html', - 'spanner/usage.html', - 'trace/starting.html', -] -def copy_static_html_pages(app, exception): - if exception is None and app.builder.name == 'html': - for static_html_page in static_html_pages: - target_path = app.outdir + '/' + static_html_page - src_path = app.srcdir + '/' + static_html_page - if os.path.isfile(src_path): - logger.info( - 'Copying static html: %s -> %s', src_path, target_path) - shutil.copyfile(src_path, target_path) - -def setup(app): - app.connect('build-finished', copy_static_html_pages) +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/cloudbuild/docs/gapic/v1/api.rst b/cloudbuild/docs/gapic/v1/api.rst new file mode 100644 index 000000000000..b73769d5c2a1 --- /dev/null +++ b/cloudbuild/docs/gapic/v1/api.rst @@ -0,0 +1,6 @@ +Client for Cloud Build API +========================== + +.. automodule:: google.cloud.devtools.cloudbuild_v1 + :members: + :inherited-members: \ No newline at end of file diff --git a/cloudbuild/docs/gapic/v1/types.rst b/cloudbuild/docs/gapic/v1/types.rst new file mode 100644 index 000000000000..db501043e959 --- /dev/null +++ b/cloudbuild/docs/gapic/v1/types.rst @@ -0,0 +1,5 @@ +Types for Cloud Build API Client +================================ + +.. 
automodule:: google.cloud.devtools.cloudbuild_v1.types + :members: \ No newline at end of file diff --git a/cloudbuild/docs/index.rst b/cloudbuild/docs/index.rst new file mode 100644 index 000000000000..382348916108 --- /dev/null +++ b/cloudbuild/docs/index.rst @@ -0,0 +1,22 @@ +.. include:: README.rst + + +API Reference +------------- + +.. toctree:: + :maxdepth: 2 + + gapic/v1/api + gapic/v1/types + + +Changelog +--------- + +For a list of all ``google-cloud-build`` releases: + +.. toctree:: + :maxdepth: 2 + + changelog diff --git a/cloudbuild/google/__init__.py b/cloudbuild/google/__init__.py new file mode 100644 index 000000000000..8fcc60e2b9c6 --- /dev/null +++ b/cloudbuild/google/__init__.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +try: + import pkg_resources + + pkg_resources.declare_namespace(__name__) +except ImportError: + import pkgutil + + __path__ = pkgutil.extend_path(__path__, __name__) diff --git a/cloudbuild/google/cloud/__init__.py b/cloudbuild/google/cloud/__init__.py new file mode 100644 index 000000000000..8fcc60e2b9c6 --- /dev/null +++ b/cloudbuild/google/cloud/__init__.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +try: + import pkg_resources + + pkg_resources.declare_namespace(__name__) +except ImportError: + import pkgutil + + __path__ = pkgutil.extend_path(__path__, __name__) diff --git a/cloudbuild/google/cloud/devtools/__init__.py b/cloudbuild/google/cloud/devtools/__init__.py new file mode 100644 index 000000000000..8fcc60e2b9c6 --- /dev/null +++ b/cloudbuild/google/cloud/devtools/__init__.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +try: + import pkg_resources + + pkg_resources.declare_namespace(__name__) +except ImportError: + import pkgutil + + __path__ = pkgutil.extend_path(__path__, __name__) diff --git a/cloudbuild/google/cloud/devtools/cloudbuild.py b/cloudbuild/google/cloud/devtools/cloudbuild.py new file mode 100644 index 000000000000..b2cca3a5e07f --- /dev/null +++ b/cloudbuild/google/cloud/devtools/cloudbuild.py @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +from __future__ import absolute_import + +from google.cloud.devtools.cloudbuild_v1 import CloudBuildClient +from google.cloud.devtools.cloudbuild_v1 import enums +from google.cloud.devtools.cloudbuild_v1 import types + + +__all__ = ("enums", "types", "CloudBuildClient") diff --git a/videointelligence/google/cloud/videointelligence_v1beta1/__init__.py b/cloudbuild/google/cloud/devtools/cloudbuild_v1/__init__.py similarity index 58% rename from videointelligence/google/cloud/videointelligence_v1beta1/__init__.py rename to cloudbuild/google/cloud/devtools/cloudbuild_v1/__init__.py index d71748798c3c..4e82e772fa0d 100644 --- a/videointelligence/google/cloud/videointelligence_v1beta1/__init__.py +++ b/cloudbuild/google/cloud/devtools/cloudbuild_v1/__init__.py @@ -17,18 +17,14 @@ from __future__ import absolute_import -from google.cloud.videointelligence_v1beta1 import types -from google.cloud.videointelligence_v1beta1.gapic import enums -from google.cloud.videointelligence_v1beta1.gapic import ( - video_intelligence_service_client, -) +from google.cloud.devtools.cloudbuild_v1 import types +from google.cloud.devtools.cloudbuild_v1.gapic import cloud_build_client +from google.cloud.devtools.cloudbuild_v1.gapic import enums -class VideoIntelligenceServiceClient( - video_intelligence_service_client.VideoIntelligenceServiceClient -): - __doc__ = video_intelligence_service_client.VideoIntelligenceServiceClient.__doc__ +class CloudBuildClient(cloud_build_client.CloudBuildClient): + __doc__ = cloud_build_client.CloudBuildClient.__doc__ enums = enums -__all__ = ("enums", "types", "VideoIntelligenceServiceClient") +__all__ = ("enums", "types", "CloudBuildClient") diff --git a/videointelligence/google/cloud/videointelligence_v1beta1/gapic/transports/__init__.py b/cloudbuild/google/cloud/devtools/cloudbuild_v1/gapic/__init__.py similarity index 100% rename from videointelligence/google/cloud/videointelligence_v1beta1/gapic/transports/__init__.py rename to 
cloudbuild/google/cloud/devtools/cloudbuild_v1/gapic/__init__.py diff --git a/cloudbuild/google/cloud/devtools/cloudbuild_v1/gapic/cloud_build_client.py b/cloudbuild/google/cloud/devtools/cloudbuild_v1/gapic/cloud_build_client.py new file mode 100644 index 000000000000..675f033b2666 --- /dev/null +++ b/cloudbuild/google/cloud/devtools/cloudbuild_v1/gapic/cloud_build_client.py @@ -0,0 +1,1334 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Accesses the google.devtools.cloudbuild.v1 CloudBuild API.""" + +import functools +import pkg_resources +import warnings + +from google.oauth2 import service_account +import google.api_core.client_options +import google.api_core.gapic_v1.client_info +import google.api_core.gapic_v1.config +import google.api_core.gapic_v1.method +import google.api_core.gapic_v1.routing_header +import google.api_core.grpc_helpers +import google.api_core.page_iterator +import grpc + +from google.cloud.devtools.cloudbuild_v1.gapic import cloud_build_client_config +from google.cloud.devtools.cloudbuild_v1.gapic import enums +from google.cloud.devtools.cloudbuild_v1.gapic.transports import ( + cloud_build_grpc_transport, +) +from google.cloud.devtools.cloudbuild_v1.proto import cloudbuild_pb2 +from google.cloud.devtools.cloudbuild_v1.proto import cloudbuild_pb2_grpc +from google.longrunning import operations_pb2 +from google.protobuf import empty_pb2 + + +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-build").version + + +class CloudBuildClient(object): + """ + Creates and manages builds on Google Cloud Platform. + + The main concept used by this API is a ``Build``, which describes the + location of the source to build, how to build the source, and where to + store the built artifacts, if any. + + A user can list previously-requested builds or get builds by their ID to + determine the status of the build. + """ + + SERVICE_ADDRESS = "cloudbuild.googleapis.com:443" + """The default address of the service.""" + + # The name of the interface for this client. This is the key used to + # find the method configuration in the client_config dictionary. + _INTERFACE_NAME = "google.devtools.cloudbuild.v1.CloudBuild" + + @classmethod + def from_service_account_file(cls, filename, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. 
+ args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudBuildClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + def __init__( + self, + transport=None, + channel=None, + credentials=None, + client_config=None, + client_info=None, + client_options=None, + ): + """Constructor. + + Args: + transport (Union[~.CloudBuildGrpcTransport, + Callable[[~.Credentials, type], ~.CloudBuildGrpcTransport]): A transport + instance, responsible for actually making the API calls. + The default transport uses the gRPC protocol. + This argument may also be a callable which returns a + transport instance. Callables will be sent the credentials + as the first argument and the default transport class as + the second argument. + channel (grpc.Channel): DEPRECATED. A ``Channel`` instance + through which to make calls. This argument is mutually exclusive + with ``credentials``; providing both will raise an exception. + credentials (google.auth.credentials.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is mutually exclusive with providing a + transport instance to ``transport``; doing so will raise + an exception. + client_config (dict): DEPRECATED. A dictionary of call options for + each method. If not specified, the default configuration is used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ client_options (Union[dict, google.api_core.client_options.ClientOptions]): + Client options used to set user options on the client. API Endpoint + should be set through client_options. + """ + # Raise deprecation warnings for things we want to go away. + if client_config is not None: + warnings.warn( + "The `client_config` argument is deprecated.", + PendingDeprecationWarning, + stacklevel=2, + ) + else: + client_config = cloud_build_client_config.config + + if channel: + warnings.warn( + "The `channel` argument is deprecated; use " "`transport` instead.", + PendingDeprecationWarning, + stacklevel=2, + ) + + api_endpoint = self.SERVICE_ADDRESS + if client_options: + if type(client_options) == dict: + client_options = google.api_core.client_options.from_dict( + client_options + ) + if client_options.api_endpoint: + api_endpoint = client_options.api_endpoint + + # Instantiate the transport. + # The transport is responsible for handling serialization and + # deserialization and actually sending data to the service. + if transport: + if callable(transport): + self.transport = transport( + credentials=credentials, + default_class=cloud_build_grpc_transport.CloudBuildGrpcTransport, + address=api_endpoint, + ) + else: + if credentials: + raise ValueError( + "Received both a transport instance and " + "credentials; these are mutually exclusive." + ) + self.transport = transport + else: + self.transport = cloud_build_grpc_transport.CloudBuildGrpcTransport( + address=api_endpoint, channel=channel, credentials=credentials + ) + + if client_info is None: + client_info = google.api_core.gapic_v1.client_info.ClientInfo( + gapic_version=_GAPIC_LIBRARY_VERSION + ) + else: + client_info.gapic_version = _GAPIC_LIBRARY_VERSION + self._client_info = client_info + + # Parse out the default settings for retry and timeout for each RPC + # from the client configuration. + # (Ordinarily, these are the defaults specified in the `*_config.py` + # file next to this one.) 
+ self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( + client_config["interfaces"][self._INTERFACE_NAME] + ) + + # Save a dictionary of cached API call functions. + # These are the actual callables which invoke the proper + # transport methods, wrapped with `wrap_method` to add retry, + # timeout, and the like. + self._inner_api_calls = {} + + # Service calls + def create_build( + self, + project_id, + build, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Starts a build with the specified configuration. + + This method returns a long-running ``Operation``, which includes the + build ID. Pass the build ID to ``GetBuild`` to determine the build + status (such as ``SUCCESS`` or ``FAILURE``). + + Example: + >>> from google.cloud.devtools import cloudbuild_v1 + >>> + >>> client = cloudbuild_v1.CloudBuildClient() + >>> + >>> # TODO: Initialize `project_id`: + >>> project_id = '' + >>> + >>> # TODO: Initialize `build`: + >>> build = {} + >>> + >>> response = client.create_build(project_id, build) + + Args: + project_id (str): Required. ID of the project. + build (Union[dict, ~google.cloud.devtools.cloudbuild_v1.types.Build]): Required. Build resource to create. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.devtools.cloudbuild_v1.types.Build` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.devtools.cloudbuild_v1.types.Operation` instance. 
+ + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "create_build" not in self._inner_api_calls: + self._inner_api_calls[ + "create_build" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.create_build, + default_retry=self._method_configs["CreateBuild"].retry, + default_timeout=self._method_configs["CreateBuild"].timeout, + client_info=self._client_info, + ) + + request = cloudbuild_pb2.CreateBuildRequest(project_id=project_id, build=build) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("project_id", project_id)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + return self._inner_api_calls["create_build"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def get_build( + self, + project_id, + id_, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Returns information about a previously requested build. + + The ``Build`` that is returned includes its status (such as ``SUCCESS``, + ``FAILURE``, or ``WORKING``), and timing information. + + Example: + >>> from google.cloud.devtools import cloudbuild_v1 + >>> + >>> client = cloudbuild_v1.CloudBuildClient() + >>> + >>> # TODO: Initialize `project_id`: + >>> project_id = '' + >>> + >>> # TODO: Initialize `id_`: + >>> id_ = '' + >>> + >>> response = client.get_build(project_id, id_) + + Args: + project_id (str): Required. ID of the project. + id_ (str): Required. ID of the build. 
+ retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.devtools.cloudbuild_v1.types.Build` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "get_build" not in self._inner_api_calls: + self._inner_api_calls[ + "get_build" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.get_build, + default_retry=self._method_configs["GetBuild"].retry, + default_timeout=self._method_configs["GetBuild"].timeout, + client_info=self._client_info, + ) + + request = cloudbuild_pb2.GetBuildRequest(project_id=project_id, id=id_) + return self._inner_api_calls["get_build"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def list_builds( + self, + project_id, + page_size=None, + filter_=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Lists previously requested builds. + + Previously requested builds may still be in-progress, or may have finished + successfully or unsuccessfully. 
+ + Example: + >>> from google.cloud.devtools import cloudbuild_v1 + >>> + >>> client = cloudbuild_v1.CloudBuildClient() + >>> + >>> # TODO: Initialize `project_id`: + >>> project_id = '' + >>> + >>> # Iterate over all results + >>> for element in client.list_builds(project_id): + ... # process element + ... pass + >>> + >>> + >>> # Alternatively: + >>> + >>> # Iterate over results one page at a time + >>> for page in client.list_builds(project_id).pages: + ... for element in page: + ... # process element + ... pass + + Args: + project_id (str): Required. ID of the project. + page_size (int): The maximum number of resources contained in the + underlying API response. If page streaming is performed per- + resource, this parameter does not affect the return value. If page + streaming is performed per-page, this determines the maximum number + of resources in a page. + filter_ (str): The raw filter text to constrain the results. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.api_core.page_iterator.PageIterator` instance. + An iterable of :class:`~google.cloud.devtools.cloudbuild_v1.types.Build` instances. + You can also iterate over the pages of the response + using its `pages` property. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
+ if "list_builds" not in self._inner_api_calls: + self._inner_api_calls[ + "list_builds" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.list_builds, + default_retry=self._method_configs["ListBuilds"].retry, + default_timeout=self._method_configs["ListBuilds"].timeout, + client_info=self._client_info, + ) + + request = cloudbuild_pb2.ListBuildsRequest( + project_id=project_id, page_size=page_size, filter=filter_ + ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("project_id", project_id)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + iterator = google.api_core.page_iterator.GRPCIterator( + client=None, + method=functools.partial( + self._inner_api_calls["list_builds"], + retry=retry, + timeout=timeout, + metadata=metadata, + ), + request=request, + items_field="builds", + request_token_field="page_token", + response_token_field="next_page_token", + ) + return iterator + + def cancel_build( + self, + project_id, + id_, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Cancels a build in progress. + + Example: + >>> from google.cloud.devtools import cloudbuild_v1 + >>> + >>> client = cloudbuild_v1.CloudBuildClient() + >>> + >>> # TODO: Initialize `project_id`: + >>> project_id = '' + >>> + >>> # TODO: Initialize `id_`: + >>> id_ = '' + >>> + >>> response = client.cancel_build(project_id, id_) + + Args: + project_id (str): Required. ID of the project. + id_ (str): Required. ID of the build. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. 
Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.devtools.cloudbuild_v1.types.Build` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "cancel_build" not in self._inner_api_calls: + self._inner_api_calls[ + "cancel_build" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.cancel_build, + default_retry=self._method_configs["CancelBuild"].retry, + default_timeout=self._method_configs["CancelBuild"].timeout, + client_info=self._client_info, + ) + + request = cloudbuild_pb2.CancelBuildRequest(project_id=project_id, id=id_) + return self._inner_api_calls["cancel_build"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def create_build_trigger( + self, + project_id, + trigger, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Creates a new ``BuildTrigger``. + + This API is experimental. + + Example: + >>> from google.cloud.devtools import cloudbuild_v1 + >>> + >>> client = cloudbuild_v1.CloudBuildClient() + >>> + >>> # TODO: Initialize `project_id`: + >>> project_id = '' + >>> + >>> # TODO: Initialize `trigger`: + >>> trigger = {} + >>> + >>> response = client.create_build_trigger(project_id, trigger) + + Args: + project_id (str): Required. ID of the project for which to configure automatic builds. + trigger (Union[dict, ~google.cloud.devtools.cloudbuild_v1.types.BuildTrigger]): Required. ``BuildTrigger`` to create. 
+ + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.devtools.cloudbuild_v1.types.BuildTrigger` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.devtools.cloudbuild_v1.types.BuildTrigger` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
+ if "create_build_trigger" not in self._inner_api_calls: + self._inner_api_calls[ + "create_build_trigger" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.create_build_trigger, + default_retry=self._method_configs["CreateBuildTrigger"].retry, + default_timeout=self._method_configs["CreateBuildTrigger"].timeout, + client_info=self._client_info, + ) + + request = cloudbuild_pb2.CreateBuildTriggerRequest( + project_id=project_id, trigger=trigger + ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("project_id", project_id)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + return self._inner_api_calls["create_build_trigger"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def get_build_trigger( + self, + project_id, + trigger_id, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Returns information about a ``BuildTrigger``. + + This API is experimental. + + Example: + >>> from google.cloud.devtools import cloudbuild_v1 + >>> + >>> client = cloudbuild_v1.CloudBuildClient() + >>> + >>> # TODO: Initialize `project_id`: + >>> project_id = '' + >>> + >>> # TODO: Initialize `trigger_id`: + >>> trigger_id = '' + >>> + >>> response = client.get_build_trigger(project_id, trigger_id) + + Args: + project_id (str): Required. ID of the project that owns the trigger. + trigger_id (str): Required. ID of the ``BuildTrigger`` to get. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. 
+ metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.devtools.cloudbuild_v1.types.BuildTrigger` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "get_build_trigger" not in self._inner_api_calls: + self._inner_api_calls[ + "get_build_trigger" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.get_build_trigger, + default_retry=self._method_configs["GetBuildTrigger"].retry, + default_timeout=self._method_configs["GetBuildTrigger"].timeout, + client_info=self._client_info, + ) + + request = cloudbuild_pb2.GetBuildTriggerRequest( + project_id=project_id, trigger_id=trigger_id + ) + return self._inner_api_calls["get_build_trigger"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def list_build_triggers( + self, + project_id, + page_size=None, + page_token=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Lists existing ``BuildTrigger``\ s. + + This API is experimental. + + Example: + >>> from google.cloud.devtools import cloudbuild_v1 + >>> + >>> client = cloudbuild_v1.CloudBuildClient() + >>> + >>> # TODO: Initialize `project_id`: + >>> project_id = '' + >>> + >>> response = client.list_build_triggers(project_id) + + Args: + project_id (str): Required. ID of the project for which to list BuildTriggers. + page_size (int): Number of results to return in the list. + page_token (str): Token to provide to skip to a particular spot in the list. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. 
If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.devtools.cloudbuild_v1.types.ListBuildTriggersResponse` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "list_build_triggers" not in self._inner_api_calls: + self._inner_api_calls[ + "list_build_triggers" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.list_build_triggers, + default_retry=self._method_configs["ListBuildTriggers"].retry, + default_timeout=self._method_configs["ListBuildTriggers"].timeout, + client_info=self._client_info, + ) + + request = cloudbuild_pb2.ListBuildTriggersRequest( + project_id=project_id, page_size=page_size, page_token=page_token + ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("project_id", project_id)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + return self._inner_api_calls["list_build_triggers"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def delete_build_trigger( + self, + project_id, + trigger_id, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Deletes a ``BuildTrigger`` by its project ID and trigger ID. 
+ + This API is experimental. + + Example: + >>> from google.cloud.devtools import cloudbuild_v1 + >>> + >>> client = cloudbuild_v1.CloudBuildClient() + >>> + >>> # TODO: Initialize `project_id`: + >>> project_id = '' + >>> + >>> # TODO: Initialize `trigger_id`: + >>> trigger_id = '' + >>> + >>> client.delete_build_trigger(project_id, trigger_id) + + Args: + project_id (str): Required. ID of the project that owns the trigger. + trigger_id (str): Required. ID of the ``BuildTrigger`` to delete. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
+ if "delete_build_trigger" not in self._inner_api_calls: + self._inner_api_calls[ + "delete_build_trigger" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.delete_build_trigger, + default_retry=self._method_configs["DeleteBuildTrigger"].retry, + default_timeout=self._method_configs["DeleteBuildTrigger"].timeout, + client_info=self._client_info, + ) + + request = cloudbuild_pb2.DeleteBuildTriggerRequest( + project_id=project_id, trigger_id=trigger_id + ) + self._inner_api_calls["delete_build_trigger"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def update_build_trigger( + self, + project_id, + trigger_id, + trigger, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Updates a ``BuildTrigger`` by its project ID and trigger ID. + + This API is experimental. + + Example: + >>> from google.cloud.devtools import cloudbuild_v1 + >>> + >>> client = cloudbuild_v1.CloudBuildClient() + >>> + >>> # TODO: Initialize `project_id`: + >>> project_id = '' + >>> + >>> # TODO: Initialize `trigger_id`: + >>> trigger_id = '' + >>> + >>> # TODO: Initialize `trigger`: + >>> trigger = {} + >>> + >>> response = client.update_build_trigger(project_id, trigger_id, trigger) + + Args: + project_id (str): Required. ID of the project that owns the trigger. + trigger_id (str): Required. ID of the ``BuildTrigger`` to update. + trigger (Union[dict, ~google.cloud.devtools.cloudbuild_v1.types.BuildTrigger]): Required. ``BuildTrigger`` to update. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.devtools.cloudbuild_v1.types.BuildTrigger` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. 
Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.devtools.cloudbuild_v1.types.BuildTrigger` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "update_build_trigger" not in self._inner_api_calls: + self._inner_api_calls[ + "update_build_trigger" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.update_build_trigger, + default_retry=self._method_configs["UpdateBuildTrigger"].retry, + default_timeout=self._method_configs["UpdateBuildTrigger"].timeout, + client_info=self._client_info, + ) + + request = cloudbuild_pb2.UpdateBuildTriggerRequest( + project_id=project_id, trigger_id=trigger_id, trigger=trigger + ) + return self._inner_api_calls["update_build_trigger"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def run_build_trigger( + self, + project_id, + trigger_id, + source, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Runs a ``BuildTrigger`` at a particular source revision. + + Example: + >>> from google.cloud.devtools import cloudbuild_v1 + >>> + >>> client = cloudbuild_v1.CloudBuildClient() + >>> + >>> # TODO: Initialize `project_id`: + >>> project_id = '' + >>> + >>> # TODO: Initialize `trigger_id`: + >>> trigger_id = '' + >>> + >>> # TODO: Initialize `source`: + >>> source = {} + >>> + >>> response = client.run_build_trigger(project_id, trigger_id, source) + + Args: + project_id (str): Required. ID of the project. + trigger_id (str): Required. ID of the trigger. 
+ source (Union[dict, ~google.cloud.devtools.cloudbuild_v1.types.RepoSource]): Required. Source to build against this trigger. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.devtools.cloudbuild_v1.types.RepoSource` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.devtools.cloudbuild_v1.types.Operation` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "run_build_trigger" not in self._inner_api_calls: + self._inner_api_calls[ + "run_build_trigger" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.run_build_trigger, + default_retry=self._method_configs["RunBuildTrigger"].retry, + default_timeout=self._method_configs["RunBuildTrigger"].timeout, + client_info=self._client_info, + ) + + request = cloudbuild_pb2.RunBuildTriggerRequest( + project_id=project_id, trigger_id=trigger_id, source=source + ) + return self._inner_api_calls["run_build_trigger"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def retry_build( + self, + project_id, + id_, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Creates a new build based on the specified build. 
+ + This method creates a new build using the original build request, which + may or may not result in an identical build. + + For triggered builds: + + - Triggered builds resolve to a precise revision; therefore a retry of + a triggered build will result in a build that uses the same revision. + + For non-triggered builds that specify ``RepoSource``: + + - If the original build built from the tip of a branch, the retried + build will build from the tip of that branch, which may not be the + same revision as the original build. + - If the original build specified a commit sha or revision ID, the + retried build will use the identical source. + + For builds that specify ``StorageSource``: + + - If the original build pulled source from Google Cloud Storage without + specifying the generation of the object, the new build will use the + current object, which may be different from the original build + source. + - If the original build pulled source from Cloud Storage and specified + the generation of the object, the new build will attempt to use the + same object, which may or may not be available depending on the + bucket's lifecycle management settings. + + Example: + >>> from google.cloud.devtools import cloudbuild_v1 + >>> + >>> client = cloudbuild_v1.CloudBuildClient() + >>> + >>> # TODO: Initialize `project_id`: + >>> project_id = '' + >>> + >>> # TODO: Initialize `id_`: + >>> id_ = '' + >>> + >>> response = client.retry_build(project_id, id_) + + Args: + project_id (str): Required. ID of the project. + id_ (str): Required. Build ID of the original build. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. 
+ metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.devtools.cloudbuild_v1.types.Operation` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "retry_build" not in self._inner_api_calls: + self._inner_api_calls[ + "retry_build" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.retry_build, + default_retry=self._method_configs["RetryBuild"].retry, + default_timeout=self._method_configs["RetryBuild"].timeout, + client_info=self._client_info, + ) + + request = cloudbuild_pb2.RetryBuildRequest(project_id=project_id, id=id_) + return self._inner_api_calls["retry_build"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def create_worker_pool( + self, + parent=None, + worker_pool=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Creates a ``WorkerPool`` to run the builds, and returns the new worker + pool. + + This API is experimental. + + Example: + >>> from google.cloud.devtools import cloudbuild_v1 + >>> + >>> client = cloudbuild_v1.CloudBuildClient() + >>> + >>> response = client.create_worker_pool() + + Args: + parent (str): ID of the parent project. + worker_pool (Union[dict, ~google.cloud.devtools.cloudbuild_v1.types.WorkerPool]): ``WorkerPool`` resource to create. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.devtools.cloudbuild_v1.types.WorkerPool` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. 
If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.devtools.cloudbuild_v1.types.WorkerPool` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "create_worker_pool" not in self._inner_api_calls: + self._inner_api_calls[ + "create_worker_pool" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.create_worker_pool, + default_retry=self._method_configs["CreateWorkerPool"].retry, + default_timeout=self._method_configs["CreateWorkerPool"].timeout, + client_info=self._client_info, + ) + + request = cloudbuild_pb2.CreateWorkerPoolRequest( + parent=parent, worker_pool=worker_pool + ) + return self._inner_api_calls["create_worker_pool"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def get_worker_pool( + self, + name=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Returns information about a ``WorkerPool``. + + This API is experimental. 
+ + Example: + >>> from google.cloud.devtools import cloudbuild_v1 + >>> + >>> client = cloudbuild_v1.CloudBuildClient() + >>> + >>> response = client.get_worker_pool() + + Args: + name (str): The field will contain name of the resource requested, for example: + "projects/project-1/workerPools/workerpool-name" + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.devtools.cloudbuild_v1.types.WorkerPool` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "get_worker_pool" not in self._inner_api_calls: + self._inner_api_calls[ + "get_worker_pool" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.get_worker_pool, + default_retry=self._method_configs["GetWorkerPool"].retry, + default_timeout=self._method_configs["GetWorkerPool"].timeout, + client_info=self._client_info, + ) + + request = cloudbuild_pb2.GetWorkerPoolRequest(name=name) + return self._inner_api_calls["get_worker_pool"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def delete_worker_pool( + self, + name=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Deletes a ``WorkerPool`` by its project ID and WorkerPool name. + + This API is experimental. 
+ + Example: + >>> from google.cloud.devtools import cloudbuild_v1 + >>> + >>> client = cloudbuild_v1.CloudBuildClient() + >>> + >>> client.delete_worker_pool() + + Args: + name (str): The field will contain name of the resource requested, for example: + "projects/project-1/workerPools/workerpool-name" + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "delete_worker_pool" not in self._inner_api_calls: + self._inner_api_calls[ + "delete_worker_pool" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.delete_worker_pool, + default_retry=self._method_configs["DeleteWorkerPool"].retry, + default_timeout=self._method_configs["DeleteWorkerPool"].timeout, + client_info=self._client_info, + ) + + request = cloudbuild_pb2.DeleteWorkerPoolRequest(name=name) + self._inner_api_calls["delete_worker_pool"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def update_worker_pool( + self, + name=None, + worker_pool=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Update a ``WorkerPool``. + + This API is experimental. 
+ + Example: + >>> from google.cloud.devtools import cloudbuild_v1 + >>> + >>> client = cloudbuild_v1.CloudBuildClient() + >>> + >>> response = client.update_worker_pool() + + Args: + name (str): The field will contain name of the resource requested, for example: + "projects/project-1/workerPools/workerpool-name" + worker_pool (Union[dict, ~google.cloud.devtools.cloudbuild_v1.types.WorkerPool]): ``WorkerPool`` resource to update. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.devtools.cloudbuild_v1.types.WorkerPool` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.devtools.cloudbuild_v1.types.WorkerPool` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
+ if "update_worker_pool" not in self._inner_api_calls: + self._inner_api_calls[ + "update_worker_pool" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.update_worker_pool, + default_retry=self._method_configs["UpdateWorkerPool"].retry, + default_timeout=self._method_configs["UpdateWorkerPool"].timeout, + client_info=self._client_info, + ) + + request = cloudbuild_pb2.UpdateWorkerPoolRequest( + name=name, worker_pool=worker_pool + ) + return self._inner_api_calls["update_worker_pool"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def list_worker_pools( + self, + parent=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + List project's ``WorkerPool``\ s. + + This API is experimental. + + Example: + >>> from google.cloud.devtools import cloudbuild_v1 + >>> + >>> client = cloudbuild_v1.CloudBuildClient() + >>> + >>> response = client.list_worker_pools() + + Args: + parent (str): ID of the parent project. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.devtools.cloudbuild_v1.types.ListWorkerPoolsResponse` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
+ if "list_worker_pools" not in self._inner_api_calls: + self._inner_api_calls[ + "list_worker_pools" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.list_worker_pools, + default_retry=self._method_configs["ListWorkerPools"].retry, + default_timeout=self._method_configs["ListWorkerPools"].timeout, + client_info=self._client_info, + ) + + request = cloudbuild_pb2.ListWorkerPoolsRequest(parent=parent) + return self._inner_api_calls["list_worker_pools"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) diff --git a/cloudbuild/google/cloud/devtools/cloudbuild_v1/gapic/cloud_build_client_config.py b/cloudbuild/google/cloud/devtools/cloudbuild_v1/gapic/cloud_build_client_config.py new file mode 100644 index 000000000000..6f67ef28527e --- /dev/null +++ b/cloudbuild/google/cloud/devtools/cloudbuild_v1/gapic/cloud_build_client_config.py @@ -0,0 +1,103 @@ +config = { + "interfaces": { + "google.devtools.cloudbuild.v1.CloudBuild": { + "retry_codes": { + "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], + "non_idempotent": [], + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 20000, + "rpc_timeout_multiplier": 1.0, + "max_rpc_timeout_millis": 20000, + "total_timeout_millis": 600000, + } + }, + "methods": { + "CreateBuild": { + "timeout_millis": 20000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + }, + "GetBuild": { + "timeout_millis": 5000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, + "ListBuilds": { + "timeout_millis": 20000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, + "CancelBuild": { + "timeout_millis": 5000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + }, + "CreateBuildTrigger": { + "timeout_millis": 5000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + }, + 
"GetBuildTrigger": { + "timeout_millis": 5000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, + "ListBuildTriggers": { + "timeout_millis": 5000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, + "DeleteBuildTrigger": { + "timeout_millis": 5000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, + "UpdateBuildTrigger": { + "timeout_millis": 5000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + }, + "RunBuildTrigger": { + "timeout_millis": 20000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + }, + "RetryBuild": { + "timeout_millis": 20000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + }, + "CreateWorkerPool": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + }, + "GetWorkerPool": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + }, + "DeleteWorkerPool": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + }, + "UpdateWorkerPool": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + }, + "ListWorkerPools": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + }, + }, + } + } +} diff --git a/cloudbuild/google/cloud/devtools/cloudbuild_v1/gapic/enums.py b/cloudbuild/google/cloud/devtools/cloudbuild_v1/gapic/enums.py new file mode 100644 index 000000000000..91f5ea5af7d4 --- /dev/null +++ b/cloudbuild/google/cloud/devtools/cloudbuild_v1/gapic/enums.py @@ -0,0 +1,186 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Wrappers for protocol buffer enum types.""" + +import enum + + +class Build(object): + class Status(enum.IntEnum): + """ + Possible status of a build or build step. + + Attributes: + STATUS_UNKNOWN (int): Status of the build is unknown. + QUEUED (int): Build or step is queued; work has not yet begun. + WORKING (int): Build or step is being executed. + SUCCESS (int): Build or step finished successfully. + FAILURE (int): Build or step failed to complete successfully. + INTERNAL_ERROR (int): Build or step failed due to an internal cause. + TIMEOUT (int): Build or step took longer than was allowed. + CANCELLED (int): Build or step was canceled by a user. + """ + + STATUS_UNKNOWN = 0 + QUEUED = 1 + WORKING = 2 + SUCCESS = 3 + FAILURE = 4 + INTERNAL_ERROR = 5 + TIMEOUT = 6 + CANCELLED = 7 + + +class BuildOptions(object): + class LogStreamingOption(enum.IntEnum): + """ + Specifies the behavior when writing build logs to Google Cloud Storage. + + Attributes: + STREAM_DEFAULT (int): Service may automatically determine build log streaming behavior. + STREAM_ON (int): Build logs should be streamed to Google Cloud Storage. + STREAM_OFF (int): Build logs should not be streamed to Google Cloud Storage; they will be + written when the build is completed. + """ + + STREAM_DEFAULT = 0 + STREAM_ON = 1 + STREAM_OFF = 2 + + class LoggingMode(enum.IntEnum): + """ + Specifies the logging mode. + + Attributes: + LOGGING_UNSPECIFIED (int): The service determines the logging mode. The default is ``LEGACY``. 
Do + not rely on the default logging behavior as it may change in the future. + LEGACY (int): Stackdriver logging and Cloud Storage logging are enabled. + GCS_ONLY (int): Only Cloud Storage logging is enabled. + """ + + LOGGING_UNSPECIFIED = 0 + LEGACY = 1 + GCS_ONLY = 2 + + class MachineType(enum.IntEnum): + """ + Supported VM sizes. + + Attributes: + UNSPECIFIED (int): Standard machine type. + N1_HIGHCPU_8 (int): Highcpu machine with 8 CPUs. + N1_HIGHCPU_32 (int): Highcpu machine with 32 CPUs. + """ + + UNSPECIFIED = 0 + N1_HIGHCPU_8 = 1 + N1_HIGHCPU_32 = 2 + + class SubstitutionOption(enum.IntEnum): + """ + Specifies the behavior when there is an error in the substitution checks. + + Attributes: + MUST_MATCH (int): Fails the build if error in substitutions checks, like missing + a substitution in the template or in the map. + ALLOW_LOOSE (int): Do not fail the build if error in substitutions checks. + """ + + MUST_MATCH = 0 + ALLOW_LOOSE = 1 + + class VerifyOption(enum.IntEnum): + """ + Specifies the manner in which the build should be verified, if at all. + + Attributes: + NOT_VERIFIED (int): Not a verifiable build. (default) + VERIFIED (int): Verified build. + """ + + NOT_VERIFIED = 0 + VERIFIED = 1 + + +class Hash(object): + class HashType(enum.IntEnum): + """ + Specifies the hash algorithm, if any. + + Attributes: + NONE (int): No hash requested. + SHA256 (int): Use a sha256 hash. + MD5 (int): Use a md5 hash. + """ + + NONE = 0 + SHA256 = 1 + MD5 = 2 + + +class PullRequestFilter(object): + class CommentControl(enum.IntEnum): + """ + Controls behavior of Pull Request comments. + + Attributes: + COMMENTS_DISABLED (int): Do not require comments on Pull Requests before builds are triggered. + COMMENTS_ENABLED (int): Enforce that repository owners or collaborators must comment on Pull + Requests before builds are triggered. 
+ """ + + COMMENTS_DISABLED = 0 + COMMENTS_ENABLED = 1 + + +class WorkerPool(object): + class Region(enum.IntEnum): + """ + Supported GCP regions to create the ``WorkerPool``. + + Attributes: + REGION_UNSPECIFIED (int): no region + US_CENTRAL1 (int): us-central1 region + US_WEST1 (int): us-west1 region + US_EAST1 (int): us-east1 region + US_EAST4 (int): us-east4 region + """ + + REGION_UNSPECIFIED = 0 + US_CENTRAL1 = 1 + US_WEST1 = 2 + US_EAST1 = 3 + US_EAST4 = 4 + + class Status(enum.IntEnum): + """ + ``WorkerPool`` status + + Attributes: + STATUS_UNSPECIFIED (int): Status of the ``WorkerPool`` is unknown. + CREATING (int): ``WorkerPool`` is being created. + RUNNING (int): ``WorkerPool`` is running. + DELETING (int): ``WorkerPool`` is being deleting: cancelling builds and draining + workers. + DELETED (int): ``WorkerPool`` is deleted. + """ + + STATUS_UNSPECIFIED = 0 + CREATING = 1 + RUNNING = 2 + DELETING = 3 + DELETED = 4 diff --git a/videointelligence/google/cloud/videointelligence_v1beta1/proto/__init__.py b/cloudbuild/google/cloud/devtools/cloudbuild_v1/gapic/transports/__init__.py similarity index 100% rename from videointelligence/google/cloud/videointelligence_v1beta1/proto/__init__.py rename to cloudbuild/google/cloud/devtools/cloudbuild_v1/gapic/transports/__init__.py diff --git a/cloudbuild/google/cloud/devtools/cloudbuild_v1/gapic/transports/cloud_build_grpc_transport.py b/cloudbuild/google/cloud/devtools/cloudbuild_v1/gapic/transports/cloud_build_grpc_transport.py new file mode 100644 index 000000000000..7703cc3c4fb2 --- /dev/null +++ b/cloudbuild/google/cloud/devtools/cloudbuild_v1/gapic/transports/cloud_build_grpc_transport.py @@ -0,0 +1,372 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import google.api_core.grpc_helpers + +from google.cloud.devtools.cloudbuild_v1.proto import cloudbuild_pb2_grpc + + +class CloudBuildGrpcTransport(object): + """gRPC transport class providing stubs for + google.devtools.cloudbuild.v1 CloudBuild API. + + The transport provides access to the raw gRPC stubs, + which can be used to take advantage of advanced + features of gRPC. + """ + + # The scopes needed to make gRPC calls to all of the methods defined + # in this service. + _OAUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + def __init__( + self, channel=None, credentials=None, address="cloudbuild.googleapis.com:443" + ): + """Instantiate the transport class. + + Args: + channel (grpc.Channel): A ``Channel`` instance through + which to make calls. This argument is mutually exclusive + with ``credentials``; providing both will raise an exception. + credentials (google.auth.credentials.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If none + are specified, the client will attempt to ascertain the + credentials from the environment. + address (str): The address where the service is hosted. + """ + # If both `channel` and `credentials` are specified, raise an + # exception (channels come with credentials baked in already). + if channel is not None and credentials is not None: + raise ValueError( + "The `channel` and `credentials` arguments are mutually " "exclusive." + ) + + # Create the channel. 
+ if channel is None: + channel = self.create_channel( + address=address, + credentials=credentials, + options={ + "grpc.max_send_message_length": -1, + "grpc.max_receive_message_length": -1, + }.items(), + ) + + self._channel = channel + + # gRPC uses objects called "stubs" that are bound to the + # channel and provide a basic method for each RPC. + self._stubs = {"cloud_build_stub": cloudbuild_pb2_grpc.CloudBuildStub(channel)} + + @classmethod + def create_channel( + cls, address="cloudbuild.googleapis.com:443", credentials=None, **kwargs + ): + """Create and return a gRPC channel object. + + Args: + address (str): The host for the channel to use. + credentials (~.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + kwargs (dict): Keyword arguments, which are passed to the + channel creation. + + Returns: + grpc.Channel: A gRPC channel object. + """ + return google.api_core.grpc_helpers.create_channel( + address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs + ) + + @property + def channel(self): + """The gRPC channel used by the transport. + + Returns: + grpc.Channel: A gRPC channel object. + """ + return self._channel + + @property + def create_build(self): + """Return the gRPC stub for :meth:`CloudBuildClient.create_build`. + + Starts a build with the specified configuration. + + This method returns a long-running ``Operation``, which includes the + build ID. Pass the build ID to ``GetBuild`` to determine the build + status (such as ``SUCCESS`` or ``FAILURE``). + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["cloud_build_stub"].CreateBuild + + @property + def get_build(self): + """Return the gRPC stub for :meth:`CloudBuildClient.get_build`. 
+ + Returns information about a previously requested build. + + The ``Build`` that is returned includes its status (such as ``SUCCESS``, + ``FAILURE``, or ``WORKING``), and timing information. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["cloud_build_stub"].GetBuild + + @property + def list_builds(self): + """Return the gRPC stub for :meth:`CloudBuildClient.list_builds`. + + Lists previously requested builds. + + Previously requested builds may still be in-progress, or may have finished + successfully or unsuccessfully. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["cloud_build_stub"].ListBuilds + + @property + def cancel_build(self): + """Return the gRPC stub for :meth:`CloudBuildClient.cancel_build`. + + Cancels a build in progress. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["cloud_build_stub"].CancelBuild + + @property + def create_build_trigger(self): + """Return the gRPC stub for :meth:`CloudBuildClient.create_build_trigger`. + + Creates a new ``BuildTrigger``. + + This API is experimental. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["cloud_build_stub"].CreateBuildTrigger + + @property + def get_build_trigger(self): + """Return the gRPC stub for :meth:`CloudBuildClient.get_build_trigger`. + + Returns information about a ``BuildTrigger``. + + This API is experimental. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. 
+ """ + return self._stubs["cloud_build_stub"].GetBuildTrigger + + @property + def list_build_triggers(self): + """Return the gRPC stub for :meth:`CloudBuildClient.list_build_triggers`. + + Lists existing ``BuildTrigger``\ s. + + This API is experimental. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["cloud_build_stub"].ListBuildTriggers + + @property + def delete_build_trigger(self): + """Return the gRPC stub for :meth:`CloudBuildClient.delete_build_trigger`. + + Deletes a ``BuildTrigger`` by its project ID and trigger ID. + + This API is experimental. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["cloud_build_stub"].DeleteBuildTrigger + + @property + def update_build_trigger(self): + """Return the gRPC stub for :meth:`CloudBuildClient.update_build_trigger`. + + Updates a ``BuildTrigger`` by its project ID and trigger ID. + + This API is experimental. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["cloud_build_stub"].UpdateBuildTrigger + + @property + def run_build_trigger(self): + """Return the gRPC stub for :meth:`CloudBuildClient.run_build_trigger`. + + Runs a ``BuildTrigger`` at a particular source revision. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["cloud_build_stub"].RunBuildTrigger + + @property + def retry_build(self): + """Return the gRPC stub for :meth:`CloudBuildClient.retry_build`. + + Creates a new build based on the specified build. + + This method creates a new build using the original build request, which + may or may not result in an identical build. 
+ + For triggered builds: + + - Triggered builds resolve to a precise revision; therefore a retry of + a triggered build will result in a build that uses the same revision. + + For non-triggered builds that specify ``RepoSource``: + + - If the original build built from the tip of a branch, the retried + build will build from the tip of that branch, which may not be the + same revision as the original build. + - If the original build specified a commit sha or revision ID, the + retried build will use the identical source. + + For builds that specify ``StorageSource``: + + - If the original build pulled source from Google Cloud Storage without + specifying the generation of the object, the new build will use the + current object, which may be different from the original build + source. + - If the original build pulled source from Cloud Storage and specified + the generation of the object, the new build will attempt to use the + same object, which may or may not be available depending on the + bucket's lifecycle management settings. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["cloud_build_stub"].RetryBuild + + @property + def create_worker_pool(self): + """Return the gRPC stub for :meth:`CloudBuildClient.create_worker_pool`. + + Creates a ``WorkerPool`` to run the builds, and returns the new worker + pool. + + This API is experimental. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["cloud_build_stub"].CreateWorkerPool + + @property + def get_worker_pool(self): + """Return the gRPC stub for :meth:`CloudBuildClient.get_worker_pool`. + + Returns information about a ``WorkerPool``. + + This API is experimental. 
+ + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["cloud_build_stub"].GetWorkerPool + + @property + def delete_worker_pool(self): + """Return the gRPC stub for :meth:`CloudBuildClient.delete_worker_pool`. + + Deletes a ``WorkerPool`` by its project ID and WorkerPool name. + + This API is experimental. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["cloud_build_stub"].DeleteWorkerPool + + @property + def update_worker_pool(self): + """Return the gRPC stub for :meth:`CloudBuildClient.update_worker_pool`. + + Update a ``WorkerPool``. + + This API is experimental. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["cloud_build_stub"].UpdateWorkerPool + + @property + def list_worker_pools(self): + """Return the gRPC stub for :meth:`CloudBuildClient.list_worker_pools`. + + List project's ``WorkerPool``\ s. + + This API is experimental. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["cloud_build_stub"].ListWorkerPools diff --git a/cloudbuild/google/cloud/devtools/cloudbuild_v1/proto/__init__.py b/cloudbuild/google/cloud/devtools/cloudbuild_v1/proto/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/cloudbuild/google/cloud/devtools/cloudbuild_v1/proto/cloudbuild.proto b/cloudbuild/google/cloud/devtools/cloudbuild_v1/proto/cloudbuild.proto new file mode 100644 index 000000000000..a50ec126796b --- /dev/null +++ b/cloudbuild/google/cloud/devtools/cloudbuild_v1/proto/cloudbuild.proto @@ -0,0 +1,1304 @@ +// Copyright 2019 Google LLC. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +syntax = "proto3"; + +package google.devtools.cloudbuild.v1; + +import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/longrunning/operations.proto"; +import "google/protobuf/duration.proto"; +import "google/protobuf/empty.proto"; +import "google/protobuf/timestamp.proto"; + +option go_package = "google.golang.org/genproto/googleapis/devtools/cloudbuild/v1;cloudbuild"; +option java_multiple_files = true; +option java_package = "com.google.cloudbuild.v1"; +option objc_class_prefix = "GCB"; + +// Creates and manages builds on Google Cloud Platform. +// +// The main concept used by this API is a `Build`, which describes the location +// of the source to build, how to build the source, and where to store the +// built artifacts, if any. +// +// A user can list previously-requested builds or get builds by their ID to +// determine the status of the build. +service CloudBuild { + option (google.api.default_host) = "cloudbuild.googleapis.com"; + option (google.api.oauth_scopes) = + "https://www.googleapis.com/auth/cloud-platform"; + + // Starts a build with the specified configuration. + // + // This method returns a long-running `Operation`, which includes the build + // ID. Pass the build ID to `GetBuild` to determine the build status (such as + // `SUCCESS` or `FAILURE`). 
+ rpc CreateBuild(CreateBuildRequest) returns (google.longrunning.Operation) { + option (google.api.http) = { + post: "/v1/projects/{project_id}/builds" + body: "build" + }; + option (google.api.method_signature) = "project_id,build"; + option (google.longrunning.operation_info) = { + response_type: "Build" + metadata_type: "BuildOperationMetadata" + }; + } + + // Returns information about a previously requested build. + // + // The `Build` that is returned includes its status (such as `SUCCESS`, + // `FAILURE`, or `WORKING`), and timing information. + rpc GetBuild(GetBuildRequest) returns (Build) { + option (google.api.http) = { + get: "/v1/projects/{project_id}/builds/{id}" + }; + option (google.api.method_signature) = "project_id,id"; + } + + // Lists previously requested builds. + // + // Previously requested builds may still be in-progress, or may have finished + // successfully or unsuccessfully. + rpc ListBuilds(ListBuildsRequest) returns (ListBuildsResponse) { + option (google.api.http) = { + get: "/v1/projects/{project_id}/builds" + }; + option (google.api.method_signature) = "project_id,filter"; + } + + // Cancels a build in progress. + rpc CancelBuild(CancelBuildRequest) returns (Build) { + option (google.api.http) = { + post: "/v1/projects/{project_id}/builds/{id}:cancel" + body: "*" + }; + option (google.api.method_signature) = "project_id,id"; + } + + // Creates a new build based on the specified build. + // + // This method creates a new build using the original build request, which may + // or may not result in an identical build. + // + // For triggered builds: + // + // * Triggered builds resolve to a precise revision; therefore a retry of a + // triggered build will result in a build that uses the same revision. 
+ // + // For non-triggered builds that specify `RepoSource`: + // + // * If the original build built from the tip of a branch, the retried build + // will build from the tip of that branch, which may not be the same revision + // as the original build. + // * If the original build specified a commit sha or revision ID, the retried + // build will use the identical source. + // + // For builds that specify `StorageSource`: + // + // * If the original build pulled source from Google Cloud Storage without + // specifying the generation of the object, the new build will use the current + // object, which may be different from the original build source. + // * If the original build pulled source from Cloud Storage and specified the + // generation of the object, the new build will attempt to use the same + // object, which may or may not be available depending on the bucket's + // lifecycle management settings. + rpc RetryBuild(RetryBuildRequest) returns (google.longrunning.Operation) { + option (google.api.http) = { + post: "/v1/projects/{project_id}/builds/{id}:retry" + body: "*" + }; + option (google.api.method_signature) = "project_id,id"; + option (google.longrunning.operation_info) = { + response_type: "Build" + metadata_type: "BuildOperationMetadata" + }; + } + + // Creates a new `BuildTrigger`. + // + // This API is experimental. + rpc CreateBuildTrigger(CreateBuildTriggerRequest) returns (BuildTrigger) { + option (google.api.http) = { + post: "/v1/projects/{project_id}/triggers" + body: "trigger" + }; + option (google.api.method_signature) = "project_id,trigger"; + } + + // Returns information about a `BuildTrigger`. + // + // This API is experimental. + rpc GetBuildTrigger(GetBuildTriggerRequest) returns (BuildTrigger) { + option (google.api.http) = { + get: "/v1/projects/{project_id}/triggers/{trigger_id}" + }; + option (google.api.method_signature) = "project_id,trigger_id"; + } + + // Lists existing `BuildTrigger`s. + // + // This API is experimental. 
+ rpc ListBuildTriggers(ListBuildTriggersRequest) + returns (ListBuildTriggersResponse) { + option (google.api.http) = { + get: "/v1/projects/{project_id}/triggers" + }; + option (google.api.method_signature) = "project_id"; + } + + // Deletes a `BuildTrigger` by its project ID and trigger ID. + // + // This API is experimental. + rpc DeleteBuildTrigger(DeleteBuildTriggerRequest) + returns (google.protobuf.Empty) { + option (google.api.http) = { + delete: "/v1/projects/{project_id}/triggers/{trigger_id}" + }; + option (google.api.method_signature) = "project_id,trigger_id"; + } + + // Updates a `BuildTrigger` by its project ID and trigger ID. + // + // This API is experimental. + rpc UpdateBuildTrigger(UpdateBuildTriggerRequest) returns (BuildTrigger) { + option (google.api.http) = { + patch: "/v1/projects/{project_id}/triggers/{trigger_id}" + body: "trigger" + }; + option (google.api.method_signature) = "project_id,trigger_id,trigger"; + } + + // Runs a `BuildTrigger` at a particular source revision. + rpc RunBuildTrigger(RunBuildTriggerRequest) + returns (google.longrunning.Operation) { + option (google.api.http) = { + post: "/v1/projects/{project_id}/triggers/{trigger_id}:run" + body: "source" + }; + option (google.api.method_signature) = "project_id,trigger_id,source"; + option (google.longrunning.operation_info) = { + response_type: "Build" + metadata_type: "BuildOperationMetadata" + }; + } + + // Creates a `WorkerPool` to run the builds, and returns the new worker pool. + // + // This API is experimental. + rpc CreateWorkerPool(CreateWorkerPoolRequest) returns (WorkerPool) {} + + // Returns information about a `WorkerPool`. + // + // This API is experimental. + rpc GetWorkerPool(GetWorkerPoolRequest) returns (WorkerPool) {} + + // Deletes a `WorkerPool` by its project ID and WorkerPool name. + // + // This API is experimental. + rpc DeleteWorkerPool(DeleteWorkerPoolRequest) + returns (google.protobuf.Empty) {} + + // Update a `WorkerPool`. 
+ // + // This API is experimental. + rpc UpdateWorkerPool(UpdateWorkerPoolRequest) returns (WorkerPool) {} + + // List project's `WorkerPool`s. + // + // This API is experimental. + rpc ListWorkerPools(ListWorkerPoolsRequest) + returns (ListWorkerPoolsResponse) {} +} + +// Specifies a build to retry. +message RetryBuildRequest { + // Required. ID of the project. + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. Build ID of the original build. + string id = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// Specifies a build trigger to run and the source to use. +message RunBuildTriggerRequest { + // Required. ID of the project. + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. ID of the trigger. + string trigger_id = 2 [(google.api.field_behavior) = REQUIRED]; + + // Required. Source to build against this trigger. + RepoSource source = 3 [(google.api.field_behavior) = REQUIRED]; +} + +// Location of the source in an archive file in Google Cloud Storage. +message StorageSource { + // Google Cloud Storage bucket containing the source (see + // [Bucket Name + // Requirements](https://cloud.google.com/storage/docs/bucket-naming#requirements)). + string bucket = 1; + + // Google Cloud Storage object containing the source. + // + // This object must be a gzipped archive file (`.tar.gz`) containing source to + // build. + string object = 2; + + // Google Cloud Storage generation for the object. If the generation is + // omitted, the latest generation will be used. + int64 generation = 3; +} + +// Location of the source in a Google Cloud Source Repository. +message RepoSource { + // ID of the project that owns the Cloud Source Repository. If omitted, the + // project ID requesting the build is assumed. + string project_id = 1; + + // Name of the Cloud Source Repository. If omitted, the name "default" is + // assumed. 
+ string repo_name = 2; + + // A revision within the Cloud Source Repository must be specified in + // one of these ways. + oneof revision { + // Regex matching branches to build. + // + // The syntax of the regular expressions accepted is the syntax accepted by + // RE2 and described at https://github.com/google/re2/wiki/Syntax + string branch_name = 3; + + // Regex matching tags to build. + // + // The syntax of the regular expressions accepted is the syntax accepted by + // RE2 and described at https://github.com/google/re2/wiki/Syntax + string tag_name = 4; + + // Explicit commit SHA to build. + string commit_sha = 5; + } + + // Directory, relative to the source root, in which to run the build. + // + // This must be a relative path. If a step's `dir` is specified and is an + // absolute path, this value is ignored for that step's execution. + string dir = 7; +} + +// Location of the source in a supported storage service. +message Source { + // Location of source. + oneof source { + // If provided, get the source from this location in Google Cloud Storage. + StorageSource storage_source = 2; + + // If provided, get the source from this location in a Cloud Source + // Repository. + RepoSource repo_source = 3; + } +} + +// An image built by the pipeline. +message BuiltImage { + // Name used to push the container image to Google Container Registry, as + // presented to `docker push`. + string name = 1; + + // Docker Registry 2.0 digest. + string digest = 3; + + // Output only. Stores timing information for pushing the specified image. + TimeSpan push_timing = 4; +} + +// A step in the build pipeline. +message BuildStep { + // Required. The name of the container image that will run this particular + // build step. + // + // If the image is available in the host's Docker daemon's cache, it + // will be run directly. If not, the host will attempt to pull the image + // first, using the builder service account's credentials if necessary. 
+ // + // The Docker daemon's cache will already have the latest versions of all of + // the officially supported build steps + // ([https://github.com/GoogleCloudPlatform/cloud-builders](https://github.com/GoogleCloudPlatform/cloud-builders)). + // The Docker daemon will also have cached many of the layers for some popular + // images, like "ubuntu", "debian", but they will be refreshed at the time you + // attempt to use them. + // + // If you built an image in a previous build step, it will be stored in the + // host's Docker daemon's cache and is available to use as the name for a + // later build step. + string name = 1; + + // A list of environment variable definitions to be used when running a step. + // + // The elements are of the form "KEY=VALUE" for the environment variable "KEY" + // being given the value "VALUE". + repeated string env = 2; + + // A list of arguments that will be presented to the step when it is started. + // + // If the image used to run the step's container has an entrypoint, the `args` + // are used as arguments to that entrypoint. If the image does not define + // an entrypoint, the first element in args is used as the entrypoint, + // and the remainder will be used as arguments. + repeated string args = 3; + + // Working directory to use when running this step's container. + // + // If this value is a relative path, it is relative to the build's working + // directory. If this value is absolute, it may be outside the build's working + // directory, in which case the contents of the path may not be persisted + // across build step executions, unless a `volume` for that path is specified. + // + // If the build specifies a `RepoSource` with `dir` and a step with a `dir`, + // which specifies an absolute path, the `RepoSource` `dir` is ignored for + // the step's execution. + string dir = 4; + + // Unique identifier for this build step, used in `wait_for` to + // reference this build step as a dependency. 
+ string id = 5; + + // The ID(s) of the step(s) that this build step depends on. + // This build step will not start until all the build steps in `wait_for` + // have completed successfully. If `wait_for` is empty, this build step will + // start when all previous build steps in the `Build.Steps` list have + // completed successfully. + repeated string wait_for = 6; + + // Entrypoint to be used instead of the build step image's default entrypoint. + // If unset, the image's default entrypoint is used. + string entrypoint = 7; + + // A list of environment variables which are encrypted using a Cloud Key + // Management Service crypto key. These values must be specified in the + // build's `Secret`. + repeated string secret_env = 8; + + // List of volumes to mount into the build step. + // + // Each volume is created as an empty volume prior to execution of the + // build step. Upon completion of the build, volumes and their contents are + // discarded. + // + // Using a named volume in only one step is not valid as it is indicative + // of a build request with an incorrect configuration. + repeated Volume volumes = 9; + + // Output only. Stores timing information for executing this build step. + TimeSpan timing = 10; + + // Output only. Stores timing information for pulling this build step's + // builder image only. + TimeSpan pull_timing = 13; + + // Time limit for executing this build step. If not defined, the step has no + // time limit and will be allowed to continue to run until either it completes + // or the build itself times out. + google.protobuf.Duration timeout = 11; + + // Output only. Status of the build step. At this time, build step status is + // only updated on build completion; step status is not updated in real-time + // as the build progresses. + Build.Status status = 12; +} + +// Volume describes a Docker container volume which is mounted into build steps +// in order to persist files across build step execution. 
+message Volume { + // Name of the volume to mount. + // + // Volume names must be unique per build step and must be valid names for + // Docker volumes. Each named volume must be used by at least two build steps. + string name = 1; + + // Path at which to mount the volume. + // + // Paths must be absolute and cannot conflict with other volume paths on the + // same build step or with certain reserved volume paths. + string path = 2; +} + +// Artifacts created by the build pipeline. +message Results { + // Container images that were built as a part of the build. + repeated BuiltImage images = 2; + + // List of build step digests, in the order corresponding to build step + // indices. + repeated string build_step_images = 3; + + // Path to the artifact manifest. Only populated when artifacts are uploaded. + string artifact_manifest = 4; + + // Number of artifacts uploaded. Only populated when artifacts are uploaded. + int64 num_artifacts = 5; + + // List of build step outputs, produced by builder images, in the order + // corresponding to build step indices. + // + // [Cloud Builders](https://cloud.google.com/cloud-build/docs/cloud-builders) + // can produce this output by writing to `$BUILDER_OUTPUT/output`. + // Only the first 4KB of data is stored. + repeated bytes build_step_outputs = 6; + + // Time to push all non-container artifacts. + TimeSpan artifact_timing = 7; +} + +// An artifact that was uploaded during a build. This +// is a single record in the artifact manifest JSON file. +message ArtifactResult { + // The path of an artifact in a Google Cloud Storage bucket, with the + // generation number. For example, + // `gs://mybucket/path/to/output.jar#generation`. + string location = 1; + + // The file hash of the artifact. + repeated FileHashes file_hash = 2; +} + +// A build resource in the Cloud Build API. 
+// +// At a high level, a `Build` describes where to find source code, how to build +// it (for example, the builder image to run on the source), and where to store +// the built artifacts. +// +// Fields can include the following variables, which will be expanded when the +// build is created: +// +// - $PROJECT_ID: the project ID of the build. +// - $BUILD_ID: the autogenerated ID of the build. +// - $REPO_NAME: the source repository name specified by RepoSource. +// - $BRANCH_NAME: the branch name specified by RepoSource. +// - $TAG_NAME: the tag name specified by RepoSource. +// - $REVISION_ID or $COMMIT_SHA: the commit SHA specified by RepoSource or +// resolved from the specified branch or tag. +// - $SHORT_SHA: first 7 characters of $REVISION_ID or $COMMIT_SHA. +message Build { + // Possible status of a build or build step. + enum Status { + // Status of the build is unknown. + STATUS_UNKNOWN = 0; + + // Build or step is queued; work has not yet begun. + QUEUED = 1; + + // Build or step is being executed. + WORKING = 2; + + // Build or step finished successfully. + SUCCESS = 3; + + // Build or step failed to complete successfully. + FAILURE = 4; + + // Build or step failed due to an internal cause. + INTERNAL_ERROR = 5; + + // Build or step took longer than was allowed. + TIMEOUT = 6; + + // Build or step was canceled by a user. + CANCELLED = 7; + } + + // Output only. Unique identifier of the build. + string id = 1; + + // Output only. ID of the project. + string project_id = 16; + + // Output only. Status of the build. + Status status = 2; + + // Output only. Customer-readable message about the current status. + string status_detail = 24; + + // The location of the source files to build. + Source source = 3; + + // Required. The operations to be performed on the workspace. + repeated BuildStep steps = 11; + + // Output only. Results of the build. + Results results = 10; + + // Output only. Time at which the request to create the build was received. 
+ google.protobuf.Timestamp create_time = 6; + + // Output only. Time at which execution of the build was started. + google.protobuf.Timestamp start_time = 7; + + // Output only. Time at which execution of the build was finished. + // + // The difference between finish_time and start_time is the duration of the + // build's execution. + google.protobuf.Timestamp finish_time = 8; + + // Amount of time that this build should be allowed to run, to second + // granularity. If this amount of time elapses, work on the build will cease + // and the build status will be `TIMEOUT`. + // + // Default time is ten minutes. + google.protobuf.Duration timeout = 12; + + // A list of images to be pushed upon the successful completion of all build + // steps. + // + // The images are pushed using the builder service account's credentials. + // + // The digests of the pushed images will be stored in the `Build` resource's + // results field. + // + // If any of the images fail to be pushed, the build status is marked + // `FAILURE`. + repeated string images = 13; + + // Artifacts produced by the build that should be uploaded upon + // successful completion of all build steps. + Artifacts artifacts = 37; + + // Google Cloud Storage bucket where logs should be written (see + // [Bucket Name + // Requirements](https://cloud.google.com/storage/docs/bucket-naming#requirements)). + // Logs file names will be of the format `${logs_bucket}/log-${build_id}.txt`. + string logs_bucket = 19; + + // Output only. A permanent fixed identifier for source. + SourceProvenance source_provenance = 21; + + // Output only. The ID of the `BuildTrigger` that triggered this build, if it + // was triggered automatically. + string build_trigger_id = 22; + + // Special options for this build. + BuildOptions options = 23; + + // Output only. URL to logs for this build in Google Cloud Console. + string log_url = 25; + + // Substitutions data for `Build` resource. 
+ map substitutions = 29; + + // Tags for annotation of a `Build`. These are not docker tags. + repeated string tags = 31; + + // Secrets to decrypt using Cloud Key Management Service. + repeated Secret secrets = 32; + + // Output only. Stores timing information for phases of the build. Valid keys + // are: + // + // * BUILD: time to execute all build steps + // * PUSH: time to push all specified images. + // * FETCHSOURCE: time to fetch source. + // + // If the build does not specify source or images, + // these keys will not be included. + map timing = 33; +} + +// Artifacts produced by a build that should be uploaded upon +// successful completion of all build steps. +message Artifacts { + // Files in the workspace to upload to Cloud Storage upon successful + // completion of all build steps. + message ArtifactObjects { + // Cloud Storage bucket and optional object path, in the form + // "gs://bucket/path/to/somewhere/". (see [Bucket Name + // Requirements](https://cloud.google.com/storage/docs/bucket-naming#requirements)). + // + // Files in the workspace matching any path pattern will be uploaded to + // Cloud Storage with this location as a prefix. + string location = 1; + + // Path globs used to match files in the build's workspace. + repeated string paths = 2; + + // Output only. Stores timing information for pushing all artifact objects. + TimeSpan timing = 3; + } + + // A list of images to be pushed upon the successful completion of all build + // steps. + // + // The images will be pushed using the builder service account's credentials. + // + // The digests of the pushed images will be stored in the Build resource's + // results field. + // + // If any of the images fail to be pushed, the build is marked FAILURE. + repeated string images = 1; + + // A list of objects to be uploaded to Cloud Storage upon successful + // completion of all build steps. 
+ // + // Files in the workspace matching specified paths globs will be uploaded to + // the specified Cloud Storage location using the builder service account's + // credentials. + // + // The location and generation of the uploaded objects will be stored in the + // Build resource's results field. + // + // If any objects fail to be pushed, the build is marked FAILURE. + ArtifactObjects objects = 2; +} + +// Start and end times for a build execution phase. +message TimeSpan { + // Start of time span. + google.protobuf.Timestamp start_time = 1; + + // End of time span. + google.protobuf.Timestamp end_time = 2; +} + +// Metadata for build operations. +message BuildOperationMetadata { + // The build that the operation is tracking. + Build build = 1; +} + +// Provenance of the source. Ways to find the original source, or verify that +// some source was used for this build. +message SourceProvenance { + // A copy of the build's `source.storage_source`, if exists, with any + // generations resolved. + StorageSource resolved_storage_source = 3; + + // A copy of the build's `source.repo_source`, if exists, with any + // revisions resolved. + RepoSource resolved_repo_source = 6; + + // Output only. Hash(es) of the build source, which can be used to verify that + // the original source integrity was maintained in the build. Note that + // `FileHashes` will only be populated if `BuildOptions` has requested a + // `SourceProvenanceHash`. + // + // The keys to this map are file paths used as build source and the values + // contain the hash values for those files. + // + // If the build source came in a single package such as a gzipped tarfile + // (`.tar.gz`), the `FileHash` will be for the single path to that file. + map file_hashes = 4; +} + +// Container message for hashes of byte content of files, used in +// SourceProvenance messages to verify integrity of source input to the build. +message FileHashes { + // Collection of file hashes. 
+ repeated Hash file_hash = 1; +} + +// Container message for hash values. +message Hash { + // Specifies the hash algorithm, if any. + enum HashType { + // No hash requested. + NONE = 0; + + // Use a sha256 hash. + SHA256 = 1; + + // Use a md5 hash. + MD5 = 2; + } + + // The type of hash that was performed. + HashType type = 1; + + // The hash value. + bytes value = 2; +} + +// Pairs a set of secret environment variables containing encrypted +// values with the Cloud KMS key to use to decrypt the value. +message Secret { + // Cloud KMS key name to use to decrypt these envs. + string kms_key_name = 1; + + // Map of environment variable name to its encrypted value. + // + // Secret environment variables must be unique across all of a build's + // secrets, and must be used by at least one build step. Values can be at most + // 64 KB in size. There can be at most 100 secret values across all of a + // build's secrets. + map secret_env = 3; +} + +// Request to create a new build. +message CreateBuildRequest { + // Required. ID of the project. + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. Build resource to create. + Build build = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// Request to get a build. +message GetBuildRequest { + // Required. ID of the project. + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. ID of the build. + string id = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// Request to list builds. +message ListBuildsRequest { + // Required. ID of the project. + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; + + // Number of results to return in the list. + int32 page_size = 2; + + // Token to provide to skip to a particular spot in the list. + string page_token = 3; + + // The raw filter text to constrain the results. + string filter = 8; +} + +// Response including listed builds. 
+message ListBuildsResponse { + // Builds will be sorted by `create_time`, descending. + repeated Build builds = 1; + + // Token to receive the next page of results. + string next_page_token = 2; +} + +// Request to cancel an ongoing build. +message CancelBuildRequest { + // Required. ID of the project. + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. ID of the build. + string id = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// Configuration for an automated build in response to source repository +// changes. +message BuildTrigger { + // Output only. Unique identifier of the trigger. + string id = 1; + + // Human-readable description of this trigger. + string description = 10; + + // User assigned name of the trigger. Must be unique within the project. + string name = 21; + + // Tags for annotation of a `BuildTrigger` + repeated string tags = 19; + + // Template describing the types of source changes to trigger a build. + // + // Branch and tag names in trigger templates are interpreted as regular + // expressions. Any branch or tag change that matches that regular expression + // will trigger a build. + // + // Mutually exclusive with `github`. + RepoSource trigger_template = 7; + + // GitHubEventsConfig describes the configuration of a trigger that creates + // a build whenever a GitHub event is received. + // + // Mutually exclusive with `trigger_template`. + GitHubEventsConfig github = 13; + + // Template describing the Build request to make when the trigger is matched. + oneof build_template { + // Contents of the build template. + Build build = 4; + + // Path, from the source root, to a file whose contents is used for the + // template. + string filename = 8; + } + + // Output only. Time when the trigger was created. + google.protobuf.Timestamp create_time = 5; + + // If true, the trigger will never result in a build. + bool disabled = 9; + + // Substitutions data for Build resource. 
+ map substitutions = 11; + + // ignored_files and included_files are file glob matches using + // http://godoc/pkg/path/filepath#Match extended with support for "**". + // + // If ignored_files and changed files are both empty, then they are + // not used to determine whether or not to trigger a build. + // + // If ignored_files is not empty, then we ignore any files that match + // any of the ignored_file globs. If the change has no files that are + // outside of the ignored_files globs, then we do not trigger a build. + repeated string ignored_files = 15; + + // If any of the files altered in the commit pass the ignored_files + // filter and included_files is empty, then as far as this filter is + // concerned, we should trigger the build. + // + // If any of the files altered in the commit pass the ignored_files + // filter and included_files is not empty, then we make sure that at + // least one of those files matches a included_files glob. If not, + // then we do not trigger a build. + repeated string included_files = 16; +} + +// GitHubEventsConfig describes the configuration of a trigger that creates a +// build whenever a GitHub event is received. +// +// This message is experimental. +message GitHubEventsConfig { + // The installationID that emits the GitHub event. + int64 installation_id = 1 [deprecated = true]; + + // Owner of the repository. For example: The owner for + // https://github.com/googlecloudplatform/cloud-builders is + // "googlecloudplatform". + string owner = 6; + + // Name of the repository. For example: The name for + // https://github.com/googlecloudplatform/cloud-builders is "cloud-builders". + string name = 7; + + // Filter describing the types of events to trigger a build. + // Currently supported event types: push, pull_request. + oneof event { + // filter to match changes in pull requests. + PullRequestFilter pull_request = 4; + + // filter to match changes in refs like branches, tags. 
+ PushFilter push = 5; + } +} + +// PullRequestFilter contains filter properties for matching GitHub Pull +// Requests. +message PullRequestFilter { + // Controls behavior of Pull Request comments. + enum CommentControl { + // Do not require comments on Pull Requests before builds are triggered. + COMMENTS_DISABLED = 0; + + // Enforce that repository owners or collaborators must comment on Pull + // Requests before builds are triggered. + COMMENTS_ENABLED = 1; + } + + // Target refs to match. + // A target ref is the git reference where the pull request will be applied. + oneof git_ref { + // Regex of branches to match. + // + // The syntax of the regular expressions accepted is the syntax accepted by + // RE2 and described at https://github.com/google/re2/wiki/Syntax + string branch = 2; + } + + // Whether to block builds on a "/gcbrun" comment from a repository owner or + // collaborator. + CommentControl comment_control = 5; +} + +// Push contains filter properties for matching GitHub git pushes. +message PushFilter { + // Modified refs to match. + // A modified refs are the refs modified by a git push operation. + oneof git_ref { + // Regexes matching branches to build. + // + // The syntax of the regular expressions accepted is the syntax accepted by + // RE2 and described at https://github.com/google/re2/wiki/Syntax + string branch = 2; + + // Regexes matching tags to build. + // + // The syntax of the regular expressions accepted is the syntax accepted by + // RE2 and described at https://github.com/google/re2/wiki/Syntax + string tag = 3; + } +} + +// Request to create a new `BuildTrigger`. +message CreateBuildTriggerRequest { + // Required. ID of the project for which to configure automatic builds. + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. `BuildTrigger` to create. + BuildTrigger trigger = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// Returns the `BuildTrigger` with the specified ID. 
+message GetBuildTriggerRequest { + // Required. ID of the project that owns the trigger. + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. ID of the `BuildTrigger` to get. + string trigger_id = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// Request to list existing `BuildTriggers`. +message ListBuildTriggersRequest { + // Required. ID of the project for which to list BuildTriggers. + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; + + // Number of results to return in the list. + int32 page_size = 2; + + // Token to provide to skip to a particular spot in the list. + string page_token = 3; +} + +// Response containing existing `BuildTriggers`. +message ListBuildTriggersResponse { + // `BuildTriggers` for the project, sorted by `create_time` descending. + repeated BuildTrigger triggers = 1; + + // Token to receive the next page of results. + string next_page_token = 2; +} + +// Request to delete a `BuildTrigger`. +message DeleteBuildTriggerRequest { + // Required. ID of the project that owns the trigger. + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. ID of the `BuildTrigger` to delete. + string trigger_id = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// Request to update an existing `BuildTrigger`. +message UpdateBuildTriggerRequest { + // Required. ID of the project that owns the trigger. + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. ID of the `BuildTrigger` to update. + string trigger_id = 2 [(google.api.field_behavior) = REQUIRED]; + + // Required. `BuildTrigger` to update. + BuildTrigger trigger = 3 [(google.api.field_behavior) = REQUIRED]; +} + +// Optional arguments to enable specific features of builds. +message BuildOptions { + // Specifies the manner in which the build should be verified, if at all. + enum VerifyOption { + // Not a verifiable build. (default) + NOT_VERIFIED = 0; + + // Verified build. 
+ VERIFIED = 1; + } + + // Supported VM sizes. + enum MachineType { + // Standard machine type. + UNSPECIFIED = 0; + + // Highcpu machine with 8 CPUs. + N1_HIGHCPU_8 = 1; + + // Highcpu machine with 32 CPUs. + N1_HIGHCPU_32 = 2; + } + + // Specifies the behavior when there is an error in the substitution checks. + enum SubstitutionOption { + // Fails the build if error in substitutions checks, like missing + // a substitution in the template or in the map. + MUST_MATCH = 0; + + // Do not fail the build if error in substitutions checks. + ALLOW_LOOSE = 1; + } + + // Specifies the behavior when writing build logs to Google Cloud Storage. + enum LogStreamingOption { + // Service may automatically determine build log streaming behavior. + STREAM_DEFAULT = 0; + + // Build logs should be streamed to Google Cloud Storage. + STREAM_ON = 1; + + // Build logs should not be streamed to Google Cloud Storage; they will be + // written when the build is completed. + STREAM_OFF = 2; + } + + // Specifies the logging mode. + enum LoggingMode { + // The service determines the logging mode. The default is `LEGACY`. Do not + // rely on the default logging behavior as it may change in the future. + LOGGING_UNSPECIFIED = 0; + + // Stackdriver logging and Cloud Storage logging are enabled. + LEGACY = 1; + + // Only Cloud Storage logging is enabled. + GCS_ONLY = 2; + } + + // Requested hash for SourceProvenance. + repeated Hash.HashType source_provenance_hash = 1; + + // Requested verifiability options. + VerifyOption requested_verify_option = 2; + + // Compute Engine machine type on which to run the build. + MachineType machine_type = 3; + + // Requested disk size for the VM that runs the build. Note that this is *NOT* + // "disk free"; some of the space will be used by the operating system and + // build utilities. Also note that this is the minimum disk size that will be + // allocated for the build -- the build may run with a larger disk than + // requested. 
At present, the maximum disk size is 1000GB; builds that request + // more than the maximum are rejected with an error. + int64 disk_size_gb = 6; + + // Option to specify behavior when there is an error in the substitution + // checks. + SubstitutionOption substitution_option = 4; + + // Option to define build log streaming behavior to Google Cloud + // Storage. + LogStreamingOption log_streaming_option = 5; + + // Option to specify a `WorkerPool` for the build. User specifies the pool + // with the format "[WORKERPOOL_PROJECT_ID]/[WORKERPOOL_NAME]". + // This is an experimental field. + string worker_pool = 7; + + // Option to specify the logging mode, which determines where the logs are + // stored. + LoggingMode logging = 11; + + // A list of global environment variable definitions that will exist for all + // build steps in this build. If a variable is defined in both globally and in + // a build step, the variable will use the build step value. + // + // The elements are of the form "KEY=VALUE" for the environment variable "KEY" + // being given the value "VALUE". + repeated string env = 12; + + // A list of global environment variables, which are encrypted using a Cloud + // Key Management Service crypto key. These values must be specified in the + // build's `Secret`. These variables will be available to all build steps + // in this build. + repeated string secret_env = 13; + + // Global list of volumes to mount for ALL build steps + // + // Each volume is created as an empty volume prior to starting the build + // process. Upon completion of the build, volumes and their contents are + // discarded. Global volume names and paths cannot conflict with the volumes + // defined a build step. + // + // Using a global volume in a build with only one step is not valid as + // it is indicative of a build request with an incorrect configuration. + repeated Volume volumes = 14; +} + +// Configuration for a WorkerPool to run the builds. 
+// +// Workers are machines that Cloud Build uses to run your builds. By default, +// all workers run in a project owned by Cloud Build. To have full control over +// the workers that execute your builds -- such as enabling them to access +// private resources on your private network -- you can request Cloud Build to +// run the workers in your own project by creating a custom workers pool. +message WorkerPool { + // Supported GCP regions to create the `WorkerPool`. + enum Region { + // no region + REGION_UNSPECIFIED = 0; + + // us-central1 region + US_CENTRAL1 = 1; + + // us-west1 region + US_WEST1 = 2; + + // us-east1 region + US_EAST1 = 3; + + // us-east4 region + US_EAST4 = 4; + } + + // `WorkerPool` status + enum Status { + // Status of the `WorkerPool` is unknown. + STATUS_UNSPECIFIED = 0; + + // `WorkerPool` is being created. + CREATING = 1; + + // `WorkerPool` is running. + RUNNING = 2; + + // `WorkerPool` is being deleting: cancelling builds and draining workers. + DELETING = 3; + + // `WorkerPool` is deleted. + DELETED = 4; + } + + // User-defined name of the `WorkerPool`. + string name = 14; + + // The project ID of the GCP project for which the `WorkerPool` is created. + string project_id = 2; + + // Output only. The service account used to manage the `WorkerPool`. The + // service account must have the Compute Instance Admin (Beta) permission at + // the project level. + string service_account_email = 3; + + // Total number of workers to be created across all requested regions. + int64 worker_count = 4; + + // Configuration to be used for a creating workers in the `WorkerPool`. + WorkerConfig worker_config = 16; + + // List of regions to create the `WorkerPool`. Regions can't be empty. + // If Cloud Build adds a new GCP region in the future, the existing + // `WorkerPool` will not be enabled in the new region automatically; + // you must add the new region to the `regions` field to enable the + // `WorkerPool` in that region. 
+ repeated Region regions = 9; + + // Output only. Time at which the request to create the `WorkerPool` was + // received. + google.protobuf.Timestamp create_time = 11; + + // Output only. Time at which the request to update the `WorkerPool` was + // received. + google.protobuf.Timestamp update_time = 17; + + // Output only. Time at which the request to delete the `WorkerPool` was + // received. + google.protobuf.Timestamp delete_time = 12; + + // Output only. WorkerPool Status. + Status status = 13; +} + +// WorkerConfig defines the configuration to be used for a creating workers in +// the pool. +message WorkerConfig { + // Machine Type of the worker, such as n1-standard-1. + // See https://cloud.google.com/compute/docs/machine-types. + // If left blank, Cloud Build will use a standard unspecified machine to + // create the worker pool. + // `machine_type` is overridden if you specify a different machine type in + // `build_options`. In this case, the VM specified in the `build_options` + // will be created on demand at build time. For more information see + // https://cloud.google.com/cloud-build/docs/speeding-up-builds#using_custom_virtual_machine_sizes + string machine_type = 1; + + // Size of the disk attached to the worker, in GB. + // See https://cloud.google.com/compute/docs/disks/ + // If `0` is specified, Cloud Build will use a standard disk size. + // `disk_size` is overridden if you specify a different disk size in + // `build_options`. In this case, a VM with a disk size specified in the + // `build_options` will be created on demand at build time. For more + // information see + // https://cloud.google.com/cloud-build/docs/api/reference/rest/v1/projects.builds#buildoptions + int64 disk_size_gb = 2; + + // The network definition used to create the worker. + // If this section is left empty, the workers will be created in + // WorkerPool.project_id on the default network. 
+ Network network = 3; + + // The tag applied to the worker, and the same tag used by the firewall rule. + // It is used to identify the Cloud Build workers among other VMs. + // The default value for tag is `worker`. + string tag = 4; +} + +// Network describes the GCP network used to create workers in. +message Network { + // Project id containing the defined network and subnetwork. For a peered VPC, + // this will be the same as the project_id in which the workers are created. + // For a shared VPC, this will be the project sharing the network with the + // project_id project in which workers will be created. For custom workers + // with no VPC, this will be the same as project_id. + string project_id = 1; + + // Network on which the workers are created. + // "default" network is used if empty. + string network = 2; + + // Subnetwork on which the workers are created. + // "default" subnetwork is used if empty. + string subnetwork = 3; +} + +// Request to create a new `WorkerPool`. +message CreateWorkerPoolRequest { + // ID of the parent project. + string parent = 1; + + // `WorkerPool` resource to create. + WorkerPool worker_pool = 2; +} + +// Request to get a `WorkerPool` with the specified name. +message GetWorkerPoolRequest { + // The field will contain name of the resource requested, for example: + // "projects/project-1/workerPools/workerpool-name" + string name = 1; +} + +// Request to delete a `WorkerPool`. +message DeleteWorkerPoolRequest { + // The field will contain name of the resource requested, for example: + // "projects/project-1/workerPools/workerpool-name" + string name = 1; +} + +// Request to update a `WorkerPool`. +message UpdateWorkerPoolRequest { + // The field will contain name of the resource requested, for example: + // "projects/project-1/workerPools/workerpool-name" + string name = 2; + + // `WorkerPool` resource to update. + WorkerPool worker_pool = 3; +} + +// Request to list `WorkerPool`s. 
+message ListWorkerPoolsRequest { + // ID of the parent project. + string parent = 1; +} + +// Response containing existing `WorkerPools`. +message ListWorkerPoolsResponse { + // `WorkerPools` for the project. + repeated WorkerPool worker_pools = 1; +} diff --git a/cloudbuild/google/cloud/devtools/cloudbuild_v1/proto/cloudbuild_pb2.py b/cloudbuild/google/cloud/devtools/cloudbuild_v1/proto/cloudbuild_pb2.py new file mode 100644 index 000000000000..a7af7be78d7d --- /dev/null +++ b/cloudbuild/google/cloud/devtools/cloudbuild_v1/proto/cloudbuild_pb2.py @@ -0,0 +1,6356 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/devtools/cloudbuild_v1/proto/cloudbuild.proto + +import sys + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.longrunning import ( + operations_pb2 as google_dot_longrunning_dot_operations__pb2, +) +from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 +from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name="google/devtools/cloudbuild_v1/proto/cloudbuild.proto", + package="google.devtools.cloudbuild.v1", + syntax="proto3", + serialized_options=_b( + 
"\n\030com.google.cloudbuild.v1P\001ZGgoogle.golang.org/genproto/googleapis/devtools/cloudbuild/v1;cloudbuild\242\002\003GCB" + ), + serialized_pb=_b( + '\n4google/devtools/cloudbuild_v1/proto/cloudbuild.proto\x12\x1dgoogle.devtools.cloudbuild.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a#google/longrunning/operations.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto"=\n\x11RetryBuildRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x0f\n\x02id\x18\x02 \x01(\tB\x03\xe0\x41\x02"\x8a\x01\n\x16RunBuildTriggerRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x17\n\ntrigger_id\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12>\n\x06source\x18\x03 \x01(\x0b\x32).google.devtools.cloudbuild.v1.RepoSourceB\x03\xe0\x41\x02"C\n\rStorageSource\x12\x0e\n\x06\x62ucket\x18\x01 \x01(\t\x12\x0e\n\x06object\x18\x02 \x01(\t\x12\x12\n\ngeneration\x18\x03 \x01(\x03"\x8d\x01\n\nRepoSource\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x11\n\trepo_name\x18\x02 \x01(\t\x12\x15\n\x0b\x62ranch_name\x18\x03 \x01(\tH\x00\x12\x12\n\x08tag_name\x18\x04 \x01(\tH\x00\x12\x14\n\ncommit_sha\x18\x05 \x01(\tH\x00\x12\x0b\n\x03\x64ir\x18\x07 \x01(\tB\n\n\x08revision"\x9c\x01\n\x06Source\x12\x46\n\x0estorage_source\x18\x02 \x01(\x0b\x32,.google.devtools.cloudbuild.v1.StorageSourceH\x00\x12@\n\x0brepo_source\x18\x03 \x01(\x0b\x32).google.devtools.cloudbuild.v1.RepoSourceH\x00\x42\x08\n\x06source"h\n\nBuiltImage\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06\x64igest\x18\x03 \x01(\t\x12<\n\x0bpush_timing\x18\x04 \x01(\x0b\x32\'.google.devtools.cloudbuild.v1.TimeSpan"\x9f\x03\n\tBuildStep\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0b\n\x03\x65nv\x18\x02 \x03(\t\x12\x0c\n\x04\x61rgs\x18\x03 \x03(\t\x12\x0b\n\x03\x64ir\x18\x04 \x01(\t\x12\n\n\x02id\x18\x05 \x01(\t\x12\x10\n\x08wait_for\x18\x06 \x03(\t\x12\x12\n\nentrypoint\x18\x07 
\x01(\t\x12\x12\n\nsecret_env\x18\x08 \x03(\t\x12\x36\n\x07volumes\x18\t \x03(\x0b\x32%.google.devtools.cloudbuild.v1.Volume\x12\x37\n\x06timing\x18\n \x01(\x0b\x32\'.google.devtools.cloudbuild.v1.TimeSpan\x12<\n\x0bpull_timing\x18\r \x01(\x0b\x32\'.google.devtools.cloudbuild.v1.TimeSpan\x12*\n\x07timeout\x18\x0b \x01(\x0b\x32\x19.google.protobuf.Duration\x12;\n\x06status\x18\x0c \x01(\x0e\x32+.google.devtools.cloudbuild.v1.Build.Status"$\n\x06Volume\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04path\x18\x02 \x01(\t"\xef\x01\n\x07Results\x12\x39\n\x06images\x18\x02 \x03(\x0b\x32).google.devtools.cloudbuild.v1.BuiltImage\x12\x19\n\x11\x62uild_step_images\x18\x03 \x03(\t\x12\x19\n\x11\x61rtifact_manifest\x18\x04 \x01(\t\x12\x15\n\rnum_artifacts\x18\x05 \x01(\x03\x12\x1a\n\x12\x62uild_step_outputs\x18\x06 \x03(\x0c\x12@\n\x0f\x61rtifact_timing\x18\x07 \x01(\x0b\x32\'.google.devtools.cloudbuild.v1.TimeSpan"`\n\x0e\x41rtifactResult\x12\x10\n\x08location\x18\x01 \x01(\t\x12<\n\tfile_hash\x18\x02 \x03(\x0b\x32).google.devtools.cloudbuild.v1.FileHashes"\xe0\t\n\x05\x42uild\x12\n\n\x02id\x18\x01 \x01(\t\x12\x12\n\nproject_id\x18\x10 \x01(\t\x12;\n\x06status\x18\x02 \x01(\x0e\x32+.google.devtools.cloudbuild.v1.Build.Status\x12\x15\n\rstatus_detail\x18\x18 \x01(\t\x12\x35\n\x06source\x18\x03 \x01(\x0b\x32%.google.devtools.cloudbuild.v1.Source\x12\x37\n\x05steps\x18\x0b \x03(\x0b\x32(.google.devtools.cloudbuild.v1.BuildStep\x12\x37\n\x07results\x18\n \x01(\x0b\x32&.google.devtools.cloudbuild.v1.Results\x12/\n\x0b\x63reate_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12.\n\nstart_time\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0b\x66inish_time\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12*\n\x07timeout\x18\x0c \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x0e\n\x06images\x18\r \x03(\t\x12;\n\tartifacts\x18% \x01(\x0b\x32(.google.devtools.cloudbuild.v1.Artifacts\x12\x13\n\x0blogs_bucket\x18\x13 \x01(\t\x12J\n\x11source_provenance\x18\x15 
\x01(\x0b\x32/.google.devtools.cloudbuild.v1.SourceProvenance\x12\x18\n\x10\x62uild_trigger_id\x18\x16 \x01(\t\x12<\n\x07options\x18\x17 \x01(\x0b\x32+.google.devtools.cloudbuild.v1.BuildOptions\x12\x0f\n\x07log_url\x18\x19 \x01(\t\x12N\n\rsubstitutions\x18\x1d \x03(\x0b\x32\x37.google.devtools.cloudbuild.v1.Build.SubstitutionsEntry\x12\x0c\n\x04tags\x18\x1f \x03(\t\x12\x36\n\x07secrets\x18 \x03(\x0b\x32%.google.devtools.cloudbuild.v1.Secret\x12@\n\x06timing\x18! \x03(\x0b\x32\x30.google.devtools.cloudbuild.v1.Build.TimingEntry\x1a\x34\n\x12SubstitutionsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1aV\n\x0bTimingEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x36\n\x05value\x18\x02 \x01(\x0b\x32\'.google.devtools.cloudbuild.v1.TimeSpan:\x02\x38\x01"\x7f\n\x06Status\x12\x12\n\x0eSTATUS_UNKNOWN\x10\x00\x12\n\n\x06QUEUED\x10\x01\x12\x0b\n\x07WORKING\x10\x02\x12\x0b\n\x07SUCCESS\x10\x03\x12\x0b\n\x07\x46\x41ILURE\x10\x04\x12\x12\n\x0eINTERNAL_ERROR\x10\x05\x12\x0b\n\x07TIMEOUT\x10\x06\x12\r\n\tCANCELLED\x10\x07"\xd3\x01\n\tArtifacts\x12\x0e\n\x06images\x18\x01 \x03(\t\x12I\n\x07objects\x18\x02 \x01(\x0b\x32\x38.google.devtools.cloudbuild.v1.Artifacts.ArtifactObjects\x1ak\n\x0f\x41rtifactObjects\x12\x10\n\x08location\x18\x01 \x01(\t\x12\r\n\x05paths\x18\x02 \x03(\t\x12\x37\n\x06timing\x18\x03 \x01(\x0b\x32\'.google.devtools.cloudbuild.v1.TimeSpan"h\n\x08TimeSpan\x12.\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"M\n\x16\x42uildOperationMetadata\x12\x33\n\x05\x62uild\x18\x01 \x01(\x0b\x32$.google.devtools.cloudbuild.v1.Build"\xde\x02\n\x10SourceProvenance\x12M\n\x17resolved_storage_source\x18\x03 \x01(\x0b\x32,.google.devtools.cloudbuild.v1.StorageSource\x12G\n\x14resolved_repo_source\x18\x06 \x01(\x0b\x32).google.devtools.cloudbuild.v1.RepoSource\x12T\n\x0b\x66ile_hashes\x18\x04 
\x03(\x0b\x32?.google.devtools.cloudbuild.v1.SourceProvenance.FileHashesEntry\x1a\\\n\x0f\x46ileHashesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x38\n\x05value\x18\x02 \x01(\x0b\x32).google.devtools.cloudbuild.v1.FileHashes:\x02\x38\x01"D\n\nFileHashes\x12\x36\n\tfile_hash\x18\x01 \x03(\x0b\x32#.google.devtools.cloudbuild.v1.Hash"|\n\x04Hash\x12:\n\x04type\x18\x01 \x01(\x0e\x32,.google.devtools.cloudbuild.v1.Hash.HashType\x12\r\n\x05value\x18\x02 \x01(\x0c")\n\x08HashType\x12\x08\n\x04NONE\x10\x00\x12\n\n\x06SHA256\x10\x01\x12\x07\n\x03MD5\x10\x02"\x9a\x01\n\x06Secret\x12\x14\n\x0ckms_key_name\x18\x01 \x01(\t\x12H\n\nsecret_env\x18\x03 \x03(\x0b\x32\x34.google.devtools.cloudbuild.v1.Secret.SecretEnvEntry\x1a\x30\n\x0eSecretEnvEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x0c:\x02\x38\x01"g\n\x12\x43reateBuildRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x38\n\x05\x62uild\x18\x02 \x01(\x0b\x32$.google.devtools.cloudbuild.v1.BuildB\x03\xe0\x41\x02";\n\x0fGetBuildRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x0f\n\x02id\x18\x02 \x01(\tB\x03\xe0\x41\x02"c\n\x11ListBuildsRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x08 \x01(\t"c\n\x12ListBuildsResponse\x12\x34\n\x06\x62uilds\x18\x01 \x03(\x0b\x32$.google.devtools.cloudbuild.v1.Build\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t">\n\x12\x43\x61ncelBuildRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x0f\n\x02id\x18\x02 \x01(\tB\x03\xe0\x41\x02"\xaf\x04\n\x0c\x42uildTrigger\x12\n\n\x02id\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\n \x01(\t\x12\x0c\n\x04name\x18\x15 \x01(\t\x12\x0c\n\x04tags\x18\x13 \x03(\t\x12\x43\n\x10trigger_template\x18\x07 \x01(\x0b\x32).google.devtools.cloudbuild.v1.RepoSource\x12\x41\n\x06github\x18\r 
\x01(\x0b\x32\x31.google.devtools.cloudbuild.v1.GitHubEventsConfig\x12\x35\n\x05\x62uild\x18\x04 \x01(\x0b\x32$.google.devtools.cloudbuild.v1.BuildH\x00\x12\x12\n\x08\x66ilename\x18\x08 \x01(\tH\x00\x12/\n\x0b\x63reate_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x10\n\x08\x64isabled\x18\t \x01(\x08\x12U\n\rsubstitutions\x18\x0b \x03(\x0b\x32>.google.devtools.cloudbuild.v1.BuildTrigger.SubstitutionsEntry\x12\x15\n\rignored_files\x18\x0f \x03(\t\x12\x16\n\x0eincluded_files\x18\x10 \x03(\t\x1a\x34\n\x12SubstitutionsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x10\n\x0e\x62uild_template"\xdc\x01\n\x12GitHubEventsConfig\x12\x1b\n\x0finstallation_id\x18\x01 \x01(\x03\x42\x02\x18\x01\x12\r\n\x05owner\x18\x06 \x01(\t\x12\x0c\n\x04name\x18\x07 \x01(\t\x12H\n\x0cpull_request\x18\x04 \x01(\x0b\x32\x30.google.devtools.cloudbuild.v1.PullRequestFilterH\x00\x12\x39\n\x04push\x18\x05 \x01(\x0b\x32).google.devtools.cloudbuild.v1.PushFilterH\x00\x42\x07\n\x05\x65vent"\xc9\x01\n\x11PullRequestFilter\x12\x10\n\x06\x62ranch\x18\x02 \x01(\tH\x00\x12X\n\x0f\x63omment_control\x18\x05 \x01(\x0e\x32?.google.devtools.cloudbuild.v1.PullRequestFilter.CommentControl"=\n\x0e\x43ommentControl\x12\x15\n\x11\x43OMMENTS_DISABLED\x10\x00\x12\x14\n\x10\x43OMMENTS_ENABLED\x10\x01\x42\t\n\x07git_ref"8\n\nPushFilter\x12\x10\n\x06\x62ranch\x18\x02 \x01(\tH\x00\x12\r\n\x03tag\x18\x03 \x01(\tH\x00\x42\t\n\x07git_ref"w\n\x19\x43reateBuildTriggerRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x41\n\x07trigger\x18\x02 \x01(\x0b\x32+.google.devtools.cloudbuild.v1.BuildTriggerB\x03\xe0\x41\x02"J\n\x16GetBuildTriggerRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x17\n\ntrigger_id\x18\x02 \x01(\tB\x03\xe0\x41\x02"Z\n\x18ListBuildTriggersRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 
\x01(\t"s\n\x19ListBuildTriggersResponse\x12=\n\x08triggers\x18\x01 \x03(\x0b\x32+.google.devtools.cloudbuild.v1.BuildTrigger\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"M\n\x19\x44\x65leteBuildTriggerRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x17\n\ntrigger_id\x18\x02 \x01(\tB\x03\xe0\x41\x02"\x90\x01\n\x19UpdateBuildTriggerRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x17\n\ntrigger_id\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x41\n\x07trigger\x18\x03 \x01(\x0b\x32+.google.devtools.cloudbuild.v1.BuildTriggerB\x03\xe0\x41\x02"\xc6\x07\n\x0c\x42uildOptions\x12L\n\x16source_provenance_hash\x18\x01 \x03(\x0e\x32,.google.devtools.cloudbuild.v1.Hash.HashType\x12Y\n\x17requested_verify_option\x18\x02 \x01(\x0e\x32\x38.google.devtools.cloudbuild.v1.BuildOptions.VerifyOption\x12M\n\x0cmachine_type\x18\x03 \x01(\x0e\x32\x37.google.devtools.cloudbuild.v1.BuildOptions.MachineType\x12\x14\n\x0c\x64isk_size_gb\x18\x06 \x01(\x03\x12[\n\x13substitution_option\x18\x04 \x01(\x0e\x32>.google.devtools.cloudbuild.v1.BuildOptions.SubstitutionOption\x12\\\n\x14log_streaming_option\x18\x05 \x01(\x0e\x32>.google.devtools.cloudbuild.v1.BuildOptions.LogStreamingOption\x12\x13\n\x0bworker_pool\x18\x07 \x01(\t\x12H\n\x07logging\x18\x0b \x01(\x0e\x32\x37.google.devtools.cloudbuild.v1.BuildOptions.LoggingMode\x12\x0b\n\x03\x65nv\x18\x0c \x03(\t\x12\x12\n\nsecret_env\x18\r \x03(\t\x12\x36\n\x07volumes\x18\x0e 
\x03(\x0b\x32%.google.devtools.cloudbuild.v1.Volume".\n\x0cVerifyOption\x12\x10\n\x0cNOT_VERIFIED\x10\x00\x12\x0c\n\x08VERIFIED\x10\x01"C\n\x0bMachineType\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\x10\n\x0cN1_HIGHCPU_8\x10\x01\x12\x11\n\rN1_HIGHCPU_32\x10\x02"5\n\x12SubstitutionOption\x12\x0e\n\nMUST_MATCH\x10\x00\x12\x0f\n\x0b\x41LLOW_LOOSE\x10\x01"G\n\x12LogStreamingOption\x12\x12\n\x0eSTREAM_DEFAULT\x10\x00\x12\r\n\tSTREAM_ON\x10\x01\x12\x0e\n\nSTREAM_OFF\x10\x02"@\n\x0bLoggingMode\x12\x17\n\x13LOGGING_UNSPECIFIED\x10\x00\x12\n\n\x06LEGACY\x10\x01\x12\x0c\n\x08GCS_ONLY\x10\x02"\xf4\x04\n\nWorkerPool\x12\x0c\n\x04name\x18\x0e \x01(\t\x12\x12\n\nproject_id\x18\x02 \x01(\t\x12\x1d\n\x15service_account_email\x18\x03 \x01(\t\x12\x14\n\x0cworker_count\x18\x04 \x01(\x03\x12\x42\n\rworker_config\x18\x10 \x01(\x0b\x32+.google.devtools.cloudbuild.v1.WorkerConfig\x12\x41\n\x07regions\x18\t \x03(\x0e\x32\x30.google.devtools.cloudbuild.v1.WorkerPool.Region\x12/\n\x0b\x63reate_time\x18\x0b \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x11 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0b\x64\x65lete_time\x18\x0c \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12@\n\x06status\x18\r \x01(\x0e\x32\x30.google.devtools.cloudbuild.v1.WorkerPool.Status"[\n\x06Region\x12\x16\n\x12REGION_UNSPECIFIED\x10\x00\x12\x0f\n\x0bUS_CENTRAL1\x10\x01\x12\x0c\n\x08US_WEST1\x10\x02\x12\x0c\n\x08US_EAST1\x10\x03\x12\x0c\n\x08US_EAST4\x10\x04"V\n\x06Status\x12\x16\n\x12STATUS_UNSPECIFIED\x10\x00\x12\x0c\n\x08\x43REATING\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\x12\x0c\n\x08\x44\x45LETING\x10\x03\x12\x0b\n\x07\x44\x45LETED\x10\x04"\x80\x01\n\x0cWorkerConfig\x12\x14\n\x0cmachine_type\x18\x01 \x01(\t\x12\x14\n\x0c\x64isk_size_gb\x18\x02 \x01(\x03\x12\x37\n\x07network\x18\x03 \x01(\x0b\x32&.google.devtools.cloudbuild.v1.Network\x12\x0b\n\x03tag\x18\x04 \x01(\t"B\n\x07Network\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0f\n\x07network\x18\x02 \x01(\t\x12\x12\n\nsubnetwork\x18\x03 
\x01(\t"i\n\x17\x43reateWorkerPoolRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12>\n\x0bworker_pool\x18\x02 \x01(\x0b\x32).google.devtools.cloudbuild.v1.WorkerPool"$\n\x14GetWorkerPoolRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"\'\n\x17\x44\x65leteWorkerPoolRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"g\n\x17UpdateWorkerPoolRequest\x12\x0c\n\x04name\x18\x02 \x01(\t\x12>\n\x0bworker_pool\x18\x03 \x01(\x0b\x32).google.devtools.cloudbuild.v1.WorkerPool"(\n\x16ListWorkerPoolsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t"Z\n\x17ListWorkerPoolsResponse\x12?\n\x0cworker_pools\x18\x01 \x03(\x0b\x32).google.devtools.cloudbuild.v1.WorkerPool2\xa1\x16\n\nCloudBuild\x12\xc5\x01\n\x0b\x43reateBuild\x12\x31.google.devtools.cloudbuild.v1.CreateBuildRequest\x1a\x1d.google.longrunning.Operation"d\x82\xd3\xe4\x93\x02)" /v1/projects/{project_id}/builds:\x05\x62uild\xda\x41\x10project_id,build\xca\x41\x1f\n\x05\x42uild\x12\x16\x42uildOperationMetadata\x12\x9f\x01\n\x08GetBuild\x12..google.devtools.cloudbuild.v1.GetBuildRequest\x1a$.google.devtools.cloudbuild.v1.Build"=\x82\xd3\xe4\x93\x02\'\x12%/v1/projects/{project_id}/builds/{id}\xda\x41\rproject_id,id\x12\xaf\x01\n\nListBuilds\x12\x30.google.devtools.cloudbuild.v1.ListBuildsRequest\x1a\x31.google.devtools.cloudbuild.v1.ListBuildsResponse"<\x82\xd3\xe4\x93\x02"\x12 
/v1/projects/{project_id}/builds\xda\x41\x11project_id,filter\x12\xaf\x01\n\x0b\x43\x61ncelBuild\x12\x31.google.devtools.cloudbuild.v1.CancelBuildRequest\x1a$.google.devtools.cloudbuild.v1.Build"G\x82\xd3\xe4\x93\x02\x31",/v1/projects/{project_id}/builds/{id}:cancel:\x01*\xda\x41\rproject_id,id\x12\xc7\x01\n\nRetryBuild\x12\x30.google.devtools.cloudbuild.v1.RetryBuildRequest\x1a\x1d.google.longrunning.Operation"h\x82\xd3\xe4\x93\x02\x30"+/v1/projects/{project_id}/builds/{id}:retry:\x01*\xda\x41\rproject_id,id\xca\x41\x1f\n\x05\x42uild\x12\x16\x42uildOperationMetadata\x12\xc5\x01\n\x12\x43reateBuildTrigger\x12\x38.google.devtools.cloudbuild.v1.CreateBuildTriggerRequest\x1a+.google.devtools.cloudbuild.v1.BuildTrigger"H\x82\xd3\xe4\x93\x02-""/v1/projects/{project_id}/triggers:\x07trigger\xda\x41\x12project_id,trigger\x12\xc6\x01\n\x0fGetBuildTrigger\x12\x35.google.devtools.cloudbuild.v1.GetBuildTriggerRequest\x1a+.google.devtools.cloudbuild.v1.BuildTrigger"O\x82\xd3\xe4\x93\x02\x31\x12//v1/projects/{project_id}/triggers/{trigger_id}\xda\x41\x15project_id,trigger_id\x12\xbf\x01\n\x11ListBuildTriggers\x12\x37.google.devtools.cloudbuild.v1.ListBuildTriggersRequest\x1a\x38.google.devtools.cloudbuild.v1.ListBuildTriggersResponse"7\x82\xd3\xe4\x93\x02$\x12"/v1/projects/{project_id}/triggers\xda\x41\nproject_id\x12\xb7\x01\n\x12\x44\x65leteBuildTrigger\x12\x38.google.devtools.cloudbuild.v1.DeleteBuildTriggerRequest\x1a\x16.google.protobuf.Empty"O\x82\xd3\xe4\x93\x02\x31*//v1/projects/{project_id}/triggers/{trigger_id}\xda\x41\x15project_id,trigger_id\x12\xdd\x01\n\x12UpdateBuildTrigger\x12\x38.google.devtools.cloudbuild.v1.UpdateBuildTriggerRequest\x1a+.google.devtools.cloudbuild.v1.BuildTrigger"`\x82\xd3\xe4\x93\x02:2//v1/projects/{project_id}/triggers/{trigger_id}:\x07trigger\xda\x41\x1dproject_id,trigger_id,trigger\x12\xee\x01\n\x0fRunBuildTrigger\x12\x35.google.devtools.cloudbuild.v1.RunBuildTriggerRequest\x1a\x1d.google.longrunning.Operation"\x84\x01\x82\xd3\xe4\x93\x02=
"3/v1/projects/{project_id}/triggers/{trigger_id}:run:\x06source\xda\x41\x1cproject_id,trigger_id,source\xca\x41\x1f\n\x05\x42uild\x12\x16\x42uildOperationMetadata\x12w\n\x10\x43reateWorkerPool\x12\x36.google.devtools.cloudbuild.v1.CreateWorkerPoolRequest\x1a).google.devtools.cloudbuild.v1.WorkerPool"\x00\x12q\n\rGetWorkerPool\x12\x33.google.devtools.cloudbuild.v1.GetWorkerPoolRequest\x1a).google.devtools.cloudbuild.v1.WorkerPool"\x00\x12\x64\n\x10\x44\x65leteWorkerPool\x12\x36.google.devtools.cloudbuild.v1.DeleteWorkerPoolRequest\x1a\x16.google.protobuf.Empty"\x00\x12w\n\x10UpdateWorkerPool\x12\x36.google.devtools.cloudbuild.v1.UpdateWorkerPoolRequest\x1a).google.devtools.cloudbuild.v1.WorkerPool"\x00\x12\x82\x01\n\x0fListWorkerPools\x12\x35.google.devtools.cloudbuild.v1.ListWorkerPoolsRequest\x1a\x36.google.devtools.cloudbuild.v1.ListWorkerPoolsResponse"\x00\x1aM\xca\x41\x19\x63loudbuild.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformBk\n\x18\x63om.google.cloudbuild.v1P\x01ZGgoogle.golang.org/genproto/googleapis/devtools/cloudbuild/v1;cloudbuild\xa2\x02\x03GCBb\x06proto3' + ), + dependencies=[ + google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, + google_dot_longrunning_dot_operations__pb2.DESCRIPTOR, + google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, + google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, + google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, + ], +) + + +_BUILD_STATUS = _descriptor.EnumDescriptor( + name="Status", + full_name="google.devtools.cloudbuild.v1.Build.Status", + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name="STATUS_UNKNOWN", index=0, number=0, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="QUEUED", index=1, number=1, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="WORKING", index=2, number=2, 
serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="SUCCESS", index=3, number=3, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="FAILURE", index=4, number=4, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="INTERNAL_ERROR", index=5, number=5, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="TIMEOUT", index=6, number=6, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="CANCELLED", index=7, number=7, serialized_options=None, type=None + ), + ], + containing_type=None, + serialized_options=None, + serialized_start=2906, + serialized_end=3033, +) +_sym_db.RegisterEnumDescriptor(_BUILD_STATUS) + +_HASH_HASHTYPE = _descriptor.EnumDescriptor( + name="HashType", + full_name="google.devtools.cloudbuild.v1.Hash.HashType", + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name="NONE", index=0, number=0, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="SHA256", index=1, number=1, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="MD5", index=2, number=2, serialized_options=None, type=None + ), + ], + containing_type=None, + serialized_options=None, + serialized_start=3940, + serialized_end=3981, +) +_sym_db.RegisterEnumDescriptor(_HASH_HASHTYPE) + +_PULLREQUESTFILTER_COMMENTCONTROL = _descriptor.EnumDescriptor( + name="CommentControl", + full_name="google.devtools.cloudbuild.v1.PullRequestFilter.CommentControl", + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name="COMMENTS_DISABLED", + index=0, + number=0, + serialized_options=None, + type=None, + ), + _descriptor.EnumValueDescriptor( + name="COMMENTS_ENABLED", + index=1, + number=1, + serialized_options=None, + type=None, + ), + ], + containing_type=None, + serialized_options=None, + serialized_start=5487, + 
serialized_end=5548, +) +_sym_db.RegisterEnumDescriptor(_PULLREQUESTFILTER_COMMENTCONTROL) + +_BUILDOPTIONS_VERIFYOPTION = _descriptor.EnumDescriptor( + name="VerifyOption", + full_name="google.devtools.cloudbuild.v1.BuildOptions.VerifyOption", + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name="NOT_VERIFIED", index=0, number=0, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="VERIFIED", index=1, number=1, serialized_options=None, type=None + ), + ], + containing_type=None, + serialized_options=None, + serialized_start=6909, + serialized_end=6955, +) +_sym_db.RegisterEnumDescriptor(_BUILDOPTIONS_VERIFYOPTION) + +_BUILDOPTIONS_MACHINETYPE = _descriptor.EnumDescriptor( + name="MachineType", + full_name="google.devtools.cloudbuild.v1.BuildOptions.MachineType", + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name="UNSPECIFIED", index=0, number=0, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="N1_HIGHCPU_8", index=1, number=1, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="N1_HIGHCPU_32", index=2, number=2, serialized_options=None, type=None + ), + ], + containing_type=None, + serialized_options=None, + serialized_start=6957, + serialized_end=7024, +) +_sym_db.RegisterEnumDescriptor(_BUILDOPTIONS_MACHINETYPE) + +_BUILDOPTIONS_SUBSTITUTIONOPTION = _descriptor.EnumDescriptor( + name="SubstitutionOption", + full_name="google.devtools.cloudbuild.v1.BuildOptions.SubstitutionOption", + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name="MUST_MATCH", index=0, number=0, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="ALLOW_LOOSE", index=1, number=1, serialized_options=None, type=None + ), + ], + containing_type=None, + serialized_options=None, + serialized_start=7026, + serialized_end=7079, +) 
+_sym_db.RegisterEnumDescriptor(_BUILDOPTIONS_SUBSTITUTIONOPTION) + +_BUILDOPTIONS_LOGSTREAMINGOPTION = _descriptor.EnumDescriptor( + name="LogStreamingOption", + full_name="google.devtools.cloudbuild.v1.BuildOptions.LogStreamingOption", + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name="STREAM_DEFAULT", index=0, number=0, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="STREAM_ON", index=1, number=1, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="STREAM_OFF", index=2, number=2, serialized_options=None, type=None + ), + ], + containing_type=None, + serialized_options=None, + serialized_start=7081, + serialized_end=7152, +) +_sym_db.RegisterEnumDescriptor(_BUILDOPTIONS_LOGSTREAMINGOPTION) + +_BUILDOPTIONS_LOGGINGMODE = _descriptor.EnumDescriptor( + name="LoggingMode", + full_name="google.devtools.cloudbuild.v1.BuildOptions.LoggingMode", + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name="LOGGING_UNSPECIFIED", + index=0, + number=0, + serialized_options=None, + type=None, + ), + _descriptor.EnumValueDescriptor( + name="LEGACY", index=1, number=1, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="GCS_ONLY", index=2, number=2, serialized_options=None, type=None + ), + ], + containing_type=None, + serialized_options=None, + serialized_start=7154, + serialized_end=7218, +) +_sym_db.RegisterEnumDescriptor(_BUILDOPTIONS_LOGGINGMODE) + +_WORKERPOOL_REGION = _descriptor.EnumDescriptor( + name="Region", + full_name="google.devtools.cloudbuild.v1.WorkerPool.Region", + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name="REGION_UNSPECIFIED", + index=0, + number=0, + serialized_options=None, + type=None, + ), + _descriptor.EnumValueDescriptor( + name="US_CENTRAL1", index=1, number=1, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + 
name="US_WEST1", index=2, number=2, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="US_EAST1", index=3, number=3, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="US_EAST4", index=4, number=4, serialized_options=None, type=None + ), + ], + containing_type=None, + serialized_options=None, + serialized_start=7670, + serialized_end=7761, +) +_sym_db.RegisterEnumDescriptor(_WORKERPOOL_REGION) + +_WORKERPOOL_STATUS = _descriptor.EnumDescriptor( + name="Status", + full_name="google.devtools.cloudbuild.v1.WorkerPool.Status", + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name="STATUS_UNSPECIFIED", + index=0, + number=0, + serialized_options=None, + type=None, + ), + _descriptor.EnumValueDescriptor( + name="CREATING", index=1, number=1, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="RUNNING", index=2, number=2, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="DELETING", index=3, number=3, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="DELETED", index=4, number=4, serialized_options=None, type=None + ), + ], + containing_type=None, + serialized_options=None, + serialized_start=7763, + serialized_end=7849, +) +_sym_db.RegisterEnumDescriptor(_WORKERPOOL_STATUS) + + +_RETRYBUILDREQUEST = _descriptor.Descriptor( + name="RetryBuildRequest", + full_name="google.devtools.cloudbuild.v1.RetryBuildRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="project_id", + full_name="google.devtools.cloudbuild.v1.RetryBuildRequest.project_id", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + 
file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="id", + full_name="google.devtools.cloudbuild.v1.RetryBuildRequest.id", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=306, + serialized_end=367, +) + + +_RUNBUILDTRIGGERREQUEST = _descriptor.Descriptor( + name="RunBuildTriggerRequest", + full_name="google.devtools.cloudbuild.v1.RunBuildTriggerRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="project_id", + full_name="google.devtools.cloudbuild.v1.RunBuildTriggerRequest.project_id", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="trigger_id", + full_name="google.devtools.cloudbuild.v1.RunBuildTriggerRequest.trigger_id", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="source", + full_name="google.devtools.cloudbuild.v1.RunBuildTriggerRequest.source", + index=2, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + 
containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=370, + serialized_end=508, +) + + +_STORAGESOURCE = _descriptor.Descriptor( + name="StorageSource", + full_name="google.devtools.cloudbuild.v1.StorageSource", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="bucket", + full_name="google.devtools.cloudbuild.v1.StorageSource.bucket", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="object", + full_name="google.devtools.cloudbuild.v1.StorageSource.object", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="generation", + full_name="google.devtools.cloudbuild.v1.StorageSource.generation", + index=2, + number=3, + type=3, + cpp_type=2, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=510, + serialized_end=577, +) + + +_REPOSOURCE = _descriptor.Descriptor( + 
name="RepoSource", + full_name="google.devtools.cloudbuild.v1.RepoSource", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="project_id", + full_name="google.devtools.cloudbuild.v1.RepoSource.project_id", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="repo_name", + full_name="google.devtools.cloudbuild.v1.RepoSource.repo_name", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="branch_name", + full_name="google.devtools.cloudbuild.v1.RepoSource.branch_name", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="tag_name", + full_name="google.devtools.cloudbuild.v1.RepoSource.tag_name", + index=3, + number=4, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="commit_sha", + full_name="google.devtools.cloudbuild.v1.RepoSource.commit_sha", + index=4, + number=5, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + 
default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="dir", + full_name="google.devtools.cloudbuild.v1.RepoSource.dir", + index=5, + number=7, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="revision", + full_name="google.devtools.cloudbuild.v1.RepoSource.revision", + index=0, + containing_type=None, + fields=[], + ) + ], + serialized_start=580, + serialized_end=721, +) + + +_SOURCE = _descriptor.Descriptor( + name="Source", + full_name="google.devtools.cloudbuild.v1.Source", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="storage_source", + full_name="google.devtools.cloudbuild.v1.Source.storage_source", + index=0, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="repo_source", + full_name="google.devtools.cloudbuild.v1.Source.repo_source", + index=1, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + 
serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="source", + full_name="google.devtools.cloudbuild.v1.Source.source", + index=0, + containing_type=None, + fields=[], + ) + ], + serialized_start=724, + serialized_end=880, +) + + +_BUILTIMAGE = _descriptor.Descriptor( + name="BuiltImage", + full_name="google.devtools.cloudbuild.v1.BuiltImage", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.devtools.cloudbuild.v1.BuiltImage.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="digest", + full_name="google.devtools.cloudbuild.v1.BuiltImage.digest", + index=1, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="push_timing", + full_name="google.devtools.cloudbuild.v1.BuiltImage.push_timing", + index=2, + number=4, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=882, + serialized_end=986, +) + + +_BUILDSTEP = _descriptor.Descriptor( + name="BuildStep", + 
full_name="google.devtools.cloudbuild.v1.BuildStep", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.devtools.cloudbuild.v1.BuildStep.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="env", + full_name="google.devtools.cloudbuild.v1.BuildStep.env", + index=1, + number=2, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="args", + full_name="google.devtools.cloudbuild.v1.BuildStep.args", + index=2, + number=3, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="dir", + full_name="google.devtools.cloudbuild.v1.BuildStep.dir", + index=3, + number=4, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="id", + full_name="google.devtools.cloudbuild.v1.BuildStep.id", + index=4, + number=5, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + 
serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="wait_for", + full_name="google.devtools.cloudbuild.v1.BuildStep.wait_for", + index=5, + number=6, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="entrypoint", + full_name="google.devtools.cloudbuild.v1.BuildStep.entrypoint", + index=6, + number=7, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="secret_env", + full_name="google.devtools.cloudbuild.v1.BuildStep.secret_env", + index=7, + number=8, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="volumes", + full_name="google.devtools.cloudbuild.v1.BuildStep.volumes", + index=8, + number=9, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="timing", + full_name="google.devtools.cloudbuild.v1.BuildStep.timing", + index=9, + number=10, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + 
_descriptor.FieldDescriptor( + name="pull_timing", + full_name="google.devtools.cloudbuild.v1.BuildStep.pull_timing", + index=10, + number=13, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="timeout", + full_name="google.devtools.cloudbuild.v1.BuildStep.timeout", + index=11, + number=11, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="status", + full_name="google.devtools.cloudbuild.v1.BuildStep.status", + index=12, + number=12, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=989, + serialized_end=1404, +) + + +_VOLUME = _descriptor.Descriptor( + name="Volume", + full_name="google.devtools.cloudbuild.v1.Volume", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.devtools.cloudbuild.v1.Volume.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="path", + 
full_name="google.devtools.cloudbuild.v1.Volume.path", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1406, + serialized_end=1442, +) + + +_RESULTS = _descriptor.Descriptor( + name="Results", + full_name="google.devtools.cloudbuild.v1.Results", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="images", + full_name="google.devtools.cloudbuild.v1.Results.images", + index=0, + number=2, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="build_step_images", + full_name="google.devtools.cloudbuild.v1.Results.build_step_images", + index=1, + number=3, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="artifact_manifest", + full_name="google.devtools.cloudbuild.v1.Results.artifact_manifest", + index=2, + number=4, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="num_artifacts", + 
full_name="google.devtools.cloudbuild.v1.Results.num_artifacts", + index=3, + number=5, + type=3, + cpp_type=2, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="build_step_outputs", + full_name="google.devtools.cloudbuild.v1.Results.build_step_outputs", + index=4, + number=6, + type=12, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="artifact_timing", + full_name="google.devtools.cloudbuild.v1.Results.artifact_timing", + index=5, + number=7, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1445, + serialized_end=1684, +) + + +_ARTIFACTRESULT = _descriptor.Descriptor( + name="ArtifactResult", + full_name="google.devtools.cloudbuild.v1.ArtifactResult", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="location", + full_name="google.devtools.cloudbuild.v1.ArtifactResult.location", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="file_hash", + 
full_name="google.devtools.cloudbuild.v1.ArtifactResult.file_hash", + index=1, + number=2, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1686, + serialized_end=1782, +) + + +_BUILD_SUBSTITUTIONSENTRY = _descriptor.Descriptor( + name="SubstitutionsEntry", + full_name="google.devtools.cloudbuild.v1.Build.SubstitutionsEntry", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="google.devtools.cloudbuild.v1.Build.SubstitutionsEntry.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.devtools.cloudbuild.v1.Build.SubstitutionsEntry.value", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=_b("8\001"), + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2764, + serialized_end=2816, +) + +_BUILD_TIMINGENTRY = _descriptor.Descriptor( + name="TimingEntry", + full_name="google.devtools.cloudbuild.v1.Build.TimingEntry", + filename=None, + file=DESCRIPTOR, + 
containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="google.devtools.cloudbuild.v1.Build.TimingEntry.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.devtools.cloudbuild.v1.Build.TimingEntry.value", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=_b("8\001"), + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2818, + serialized_end=2904, +) + +_BUILD = _descriptor.Descriptor( + name="Build", + full_name="google.devtools.cloudbuild.v1.Build", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="id", + full_name="google.devtools.cloudbuild.v1.Build.id", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="project_id", + full_name="google.devtools.cloudbuild.v1.Build.project_id", + index=1, + number=16, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + 
_descriptor.FieldDescriptor( + name="status", + full_name="google.devtools.cloudbuild.v1.Build.status", + index=2, + number=2, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="status_detail", + full_name="google.devtools.cloudbuild.v1.Build.status_detail", + index=3, + number=24, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="source", + full_name="google.devtools.cloudbuild.v1.Build.source", + index=4, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="steps", + full_name="google.devtools.cloudbuild.v1.Build.steps", + index=5, + number=11, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="results", + full_name="google.devtools.cloudbuild.v1.Build.results", + index=6, + number=10, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="create_time", + 
full_name="google.devtools.cloudbuild.v1.Build.create_time", + index=7, + number=6, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="start_time", + full_name="google.devtools.cloudbuild.v1.Build.start_time", + index=8, + number=7, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="finish_time", + full_name="google.devtools.cloudbuild.v1.Build.finish_time", + index=9, + number=8, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="timeout", + full_name="google.devtools.cloudbuild.v1.Build.timeout", + index=10, + number=12, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="images", + full_name="google.devtools.cloudbuild.v1.Build.images", + index=11, + number=13, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="artifacts", + full_name="google.devtools.cloudbuild.v1.Build.artifacts", + index=12, + number=37, + type=11, + 
cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="logs_bucket", + full_name="google.devtools.cloudbuild.v1.Build.logs_bucket", + index=13, + number=19, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="source_provenance", + full_name="google.devtools.cloudbuild.v1.Build.source_provenance", + index=14, + number=21, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="build_trigger_id", + full_name="google.devtools.cloudbuild.v1.Build.build_trigger_id", + index=15, + number=22, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="options", + full_name="google.devtools.cloudbuild.v1.Build.options", + index=16, + number=23, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="log_url", + full_name="google.devtools.cloudbuild.v1.Build.log_url", + index=17, + number=25, + type=9, + cpp_type=9, + label=1, + 
has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="substitutions", + full_name="google.devtools.cloudbuild.v1.Build.substitutions", + index=18, + number=29, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="tags", + full_name="google.devtools.cloudbuild.v1.Build.tags", + index=19, + number=31, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="secrets", + full_name="google.devtools.cloudbuild.v1.Build.secrets", + index=20, + number=32, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="timing", + full_name="google.devtools.cloudbuild.v1.Build.timing", + index=21, + number=33, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[_BUILD_SUBSTITUTIONSENTRY, _BUILD_TIMINGENTRY], + enum_types=[_BUILD_STATUS], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1785, + 
serialized_end=3033, +) + + +_ARTIFACTS_ARTIFACTOBJECTS = _descriptor.Descriptor( + name="ArtifactObjects", + full_name="google.devtools.cloudbuild.v1.Artifacts.ArtifactObjects", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="location", + full_name="google.devtools.cloudbuild.v1.Artifacts.ArtifactObjects.location", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="paths", + full_name="google.devtools.cloudbuild.v1.Artifacts.ArtifactObjects.paths", + index=1, + number=2, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="timing", + full_name="google.devtools.cloudbuild.v1.Artifacts.ArtifactObjects.timing", + index=2, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=3140, + serialized_end=3247, +) + +_ARTIFACTS = _descriptor.Descriptor( + name="Artifacts", + full_name="google.devtools.cloudbuild.v1.Artifacts", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="images", + full_name="google.devtools.cloudbuild.v1.Artifacts.images", + index=0, + number=1, + type=9, + 
cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="objects", + full_name="google.devtools.cloudbuild.v1.Artifacts.objects", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[_ARTIFACTS_ARTIFACTOBJECTS], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=3036, + serialized_end=3247, +) + + +_TIMESPAN = _descriptor.Descriptor( + name="TimeSpan", + full_name="google.devtools.cloudbuild.v1.TimeSpan", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="start_time", + full_name="google.devtools.cloudbuild.v1.TimeSpan.start_time", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="end_time", + full_name="google.devtools.cloudbuild.v1.TimeSpan.end_time", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=3249, + 
serialized_end=3353, +) + + +_BUILDOPERATIONMETADATA = _descriptor.Descriptor( + name="BuildOperationMetadata", + full_name="google.devtools.cloudbuild.v1.BuildOperationMetadata", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="build", + full_name="google.devtools.cloudbuild.v1.BuildOperationMetadata.build", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=3355, + serialized_end=3432, +) + + +_SOURCEPROVENANCE_FILEHASHESENTRY = _descriptor.Descriptor( + name="FileHashesEntry", + full_name="google.devtools.cloudbuild.v1.SourceProvenance.FileHashesEntry", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="google.devtools.cloudbuild.v1.SourceProvenance.FileHashesEntry.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.devtools.cloudbuild.v1.SourceProvenance.FileHashesEntry.value", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=_b("8\001"), + 
is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=3693, + serialized_end=3785, +) + +_SOURCEPROVENANCE = _descriptor.Descriptor( + name="SourceProvenance", + full_name="google.devtools.cloudbuild.v1.SourceProvenance", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="resolved_storage_source", + full_name="google.devtools.cloudbuild.v1.SourceProvenance.resolved_storage_source", + index=0, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="resolved_repo_source", + full_name="google.devtools.cloudbuild.v1.SourceProvenance.resolved_repo_source", + index=1, + number=6, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="file_hashes", + full_name="google.devtools.cloudbuild.v1.SourceProvenance.file_hashes", + index=2, + number=4, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[_SOURCEPROVENANCE_FILEHASHESENTRY], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=3435, + serialized_end=3785, +) + + +_FILEHASHES = _descriptor.Descriptor( + name="FileHashes", + full_name="google.devtools.cloudbuild.v1.FileHashes", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ 
+ _descriptor.FieldDescriptor( + name="file_hash", + full_name="google.devtools.cloudbuild.v1.FileHashes.file_hash", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=3787, + serialized_end=3855, +) + + +_HASH = _descriptor.Descriptor( + name="Hash", + full_name="google.devtools.cloudbuild.v1.Hash", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="type", + full_name="google.devtools.cloudbuild.v1.Hash.type", + index=0, + number=1, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.devtools.cloudbuild.v1.Hash.value", + index=1, + number=2, + type=12, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b(""), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[_HASH_HASHTYPE], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=3857, + serialized_end=3981, +) + + +_SECRET_SECRETENVENTRY = _descriptor.Descriptor( + name="SecretEnvEntry", + full_name="google.devtools.cloudbuild.v1.Secret.SecretEnvEntry", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="key", 
+ full_name="google.devtools.cloudbuild.v1.Secret.SecretEnvEntry.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.devtools.cloudbuild.v1.Secret.SecretEnvEntry.value", + index=1, + number=2, + type=12, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b(""), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=_b("8\001"), + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=4090, + serialized_end=4138, +) + +_SECRET = _descriptor.Descriptor( + name="Secret", + full_name="google.devtools.cloudbuild.v1.Secret", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="kms_key_name", + full_name="google.devtools.cloudbuild.v1.Secret.kms_key_name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="secret_env", + full_name="google.devtools.cloudbuild.v1.Secret.secret_env", + index=1, + number=3, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[_SECRET_SECRETENVENTRY], + 
enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=3984, + serialized_end=4138, +) + + +_CREATEBUILDREQUEST = _descriptor.Descriptor( + name="CreateBuildRequest", + full_name="google.devtools.cloudbuild.v1.CreateBuildRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="project_id", + full_name="google.devtools.cloudbuild.v1.CreateBuildRequest.project_id", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="build", + full_name="google.devtools.cloudbuild.v1.CreateBuildRequest.build", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=4140, + serialized_end=4243, +) + + +_GETBUILDREQUEST = _descriptor.Descriptor( + name="GetBuildRequest", + full_name="google.devtools.cloudbuild.v1.GetBuildRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="project_id", + full_name="google.devtools.cloudbuild.v1.GetBuildRequest.project_id", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + 
serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="id", + full_name="google.devtools.cloudbuild.v1.GetBuildRequest.id", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=4245, + serialized_end=4304, +) + + +_LISTBUILDSREQUEST = _descriptor.Descriptor( + name="ListBuildsRequest", + full_name="google.devtools.cloudbuild.v1.ListBuildsRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="project_id", + full_name="google.devtools.cloudbuild.v1.ListBuildsRequest.project_id", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="page_size", + full_name="google.devtools.cloudbuild.v1.ListBuildsRequest.page_size", + index=1, + number=2, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="page_token", + full_name="google.devtools.cloudbuild.v1.ListBuildsRequest.page_token", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + 
containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="filter", + full_name="google.devtools.cloudbuild.v1.ListBuildsRequest.filter", + index=3, + number=8, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=4306, + serialized_end=4405, +) + + +_LISTBUILDSRESPONSE = _descriptor.Descriptor( + name="ListBuildsResponse", + full_name="google.devtools.cloudbuild.v1.ListBuildsResponse", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="builds", + full_name="google.devtools.cloudbuild.v1.ListBuildsResponse.builds", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="next_page_token", + full_name="google.devtools.cloudbuild.v1.ListBuildsResponse.next_page_token", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=4407, + serialized_end=4506, +) + + +_CANCELBUILDREQUEST = 
_descriptor.Descriptor( + name="CancelBuildRequest", + full_name="google.devtools.cloudbuild.v1.CancelBuildRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="project_id", + full_name="google.devtools.cloudbuild.v1.CancelBuildRequest.project_id", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="id", + full_name="google.devtools.cloudbuild.v1.CancelBuildRequest.id", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=4508, + serialized_end=4570, +) + + +_BUILDTRIGGER_SUBSTITUTIONSENTRY = _descriptor.Descriptor( + name="SubstitutionsEntry", + full_name="google.devtools.cloudbuild.v1.BuildTrigger.SubstitutionsEntry", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="google.devtools.cloudbuild.v1.BuildTrigger.SubstitutionsEntry.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + 
full_name="google.devtools.cloudbuild.v1.BuildTrigger.SubstitutionsEntry.value", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=_b("8\001"), + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2764, + serialized_end=2816, +) + +_BUILDTRIGGER = _descriptor.Descriptor( + name="BuildTrigger", + full_name="google.devtools.cloudbuild.v1.BuildTrigger", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="id", + full_name="google.devtools.cloudbuild.v1.BuildTrigger.id", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="description", + full_name="google.devtools.cloudbuild.v1.BuildTrigger.description", + index=1, + number=10, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="name", + full_name="google.devtools.cloudbuild.v1.BuildTrigger.name", + index=2, + number=21, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + 
_descriptor.FieldDescriptor( + name="tags", + full_name="google.devtools.cloudbuild.v1.BuildTrigger.tags", + index=3, + number=19, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="trigger_template", + full_name="google.devtools.cloudbuild.v1.BuildTrigger.trigger_template", + index=4, + number=7, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="github", + full_name="google.devtools.cloudbuild.v1.BuildTrigger.github", + index=5, + number=13, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="build", + full_name="google.devtools.cloudbuild.v1.BuildTrigger.build", + index=6, + number=4, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="filename", + full_name="google.devtools.cloudbuild.v1.BuildTrigger.filename", + index=7, + number=8, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="create_time", + 
full_name="google.devtools.cloudbuild.v1.BuildTrigger.create_time", + index=8, + number=5, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="disabled", + full_name="google.devtools.cloudbuild.v1.BuildTrigger.disabled", + index=9, + number=9, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="substitutions", + full_name="google.devtools.cloudbuild.v1.BuildTrigger.substitutions", + index=10, + number=11, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="ignored_files", + full_name="google.devtools.cloudbuild.v1.BuildTrigger.ignored_files", + index=11, + number=15, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="included_files", + full_name="google.devtools.cloudbuild.v1.BuildTrigger.included_files", + index=12, + number=16, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[_BUILDTRIGGER_SUBSTITUTIONSENTRY], + enum_types=[], + 
serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="build_template", + full_name="google.devtools.cloudbuild.v1.BuildTrigger.build_template", + index=0, + containing_type=None, + fields=[], + ) + ], + serialized_start=4573, + serialized_end=5132, +) + + +_GITHUBEVENTSCONFIG = _descriptor.Descriptor( + name="GitHubEventsConfig", + full_name="google.devtools.cloudbuild.v1.GitHubEventsConfig", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="installation_id", + full_name="google.devtools.cloudbuild.v1.GitHubEventsConfig.installation_id", + index=0, + number=1, + type=3, + cpp_type=2, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\030\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="owner", + full_name="google.devtools.cloudbuild.v1.GitHubEventsConfig.owner", + index=1, + number=6, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="name", + full_name="google.devtools.cloudbuild.v1.GitHubEventsConfig.name", + index=2, + number=7, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="pull_request", + full_name="google.devtools.cloudbuild.v1.GitHubEventsConfig.pull_request", + index=3, + number=4, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + 
default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="push", + full_name="google.devtools.cloudbuild.v1.GitHubEventsConfig.push", + index=4, + number=5, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="event", + full_name="google.devtools.cloudbuild.v1.GitHubEventsConfig.event", + index=0, + containing_type=None, + fields=[], + ) + ], + serialized_start=5135, + serialized_end=5355, +) + + +_PULLREQUESTFILTER = _descriptor.Descriptor( + name="PullRequestFilter", + full_name="google.devtools.cloudbuild.v1.PullRequestFilter", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="branch", + full_name="google.devtools.cloudbuild.v1.PullRequestFilter.branch", + index=0, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="comment_control", + full_name="google.devtools.cloudbuild.v1.PullRequestFilter.comment_control", + index=1, + number=5, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + 
nested_types=[], + enum_types=[_PULLREQUESTFILTER_COMMENTCONTROL], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="git_ref", + full_name="google.devtools.cloudbuild.v1.PullRequestFilter.git_ref", + index=0, + containing_type=None, + fields=[], + ) + ], + serialized_start=5358, + serialized_end=5559, +) + + +_PUSHFILTER = _descriptor.Descriptor( + name="PushFilter", + full_name="google.devtools.cloudbuild.v1.PushFilter", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="branch", + full_name="google.devtools.cloudbuild.v1.PushFilter.branch", + index=0, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="tag", + full_name="google.devtools.cloudbuild.v1.PushFilter.tag", + index=1, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="git_ref", + full_name="google.devtools.cloudbuild.v1.PushFilter.git_ref", + index=0, + containing_type=None, + fields=[], + ) + ], + serialized_start=5561, + serialized_end=5617, +) + + +_CREATEBUILDTRIGGERREQUEST = _descriptor.Descriptor( + name="CreateBuildTriggerRequest", + full_name="google.devtools.cloudbuild.v1.CreateBuildTriggerRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + 
_descriptor.FieldDescriptor( + name="project_id", + full_name="google.devtools.cloudbuild.v1.CreateBuildTriggerRequest.project_id", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="trigger", + full_name="google.devtools.cloudbuild.v1.CreateBuildTriggerRequest.trigger", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=5619, + serialized_end=5738, +) + + +_GETBUILDTRIGGERREQUEST = _descriptor.Descriptor( + name="GetBuildTriggerRequest", + full_name="google.devtools.cloudbuild.v1.GetBuildTriggerRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="project_id", + full_name="google.devtools.cloudbuild.v1.GetBuildTriggerRequest.project_id", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="trigger_id", + full_name="google.devtools.cloudbuild.v1.GetBuildTriggerRequest.trigger_id", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, 
+ containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=5740, + serialized_end=5814, +) + + +_LISTBUILDTRIGGERSREQUEST = _descriptor.Descriptor( + name="ListBuildTriggersRequest", + full_name="google.devtools.cloudbuild.v1.ListBuildTriggersRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="project_id", + full_name="google.devtools.cloudbuild.v1.ListBuildTriggersRequest.project_id", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="page_size", + full_name="google.devtools.cloudbuild.v1.ListBuildTriggersRequest.page_size", + index=1, + number=2, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="page_token", + full_name="google.devtools.cloudbuild.v1.ListBuildTriggersRequest.page_token", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + 
serialized_start=5816, + serialized_end=5906, +) + + +_LISTBUILDTRIGGERSRESPONSE = _descriptor.Descriptor( + name="ListBuildTriggersResponse", + full_name="google.devtools.cloudbuild.v1.ListBuildTriggersResponse", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="triggers", + full_name="google.devtools.cloudbuild.v1.ListBuildTriggersResponse.triggers", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="next_page_token", + full_name="google.devtools.cloudbuild.v1.ListBuildTriggersResponse.next_page_token", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=5908, + serialized_end=6023, +) + + +_DELETEBUILDTRIGGERREQUEST = _descriptor.Descriptor( + name="DeleteBuildTriggerRequest", + full_name="google.devtools.cloudbuild.v1.DeleteBuildTriggerRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="project_id", + full_name="google.devtools.cloudbuild.v1.DeleteBuildTriggerRequest.project_id", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + 
_descriptor.FieldDescriptor( + name="trigger_id", + full_name="google.devtools.cloudbuild.v1.DeleteBuildTriggerRequest.trigger_id", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=6025, + serialized_end=6102, +) + + +_UPDATEBUILDTRIGGERREQUEST = _descriptor.Descriptor( + name="UpdateBuildTriggerRequest", + full_name="google.devtools.cloudbuild.v1.UpdateBuildTriggerRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="project_id", + full_name="google.devtools.cloudbuild.v1.UpdateBuildTriggerRequest.project_id", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="trigger_id", + full_name="google.devtools.cloudbuild.v1.UpdateBuildTriggerRequest.trigger_id", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="trigger", + full_name="google.devtools.cloudbuild.v1.UpdateBuildTriggerRequest.trigger", + index=2, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + 
enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=6105, + serialized_end=6249, +) + + +_BUILDOPTIONS = _descriptor.Descriptor( + name="BuildOptions", + full_name="google.devtools.cloudbuild.v1.BuildOptions", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="source_provenance_hash", + full_name="google.devtools.cloudbuild.v1.BuildOptions.source_provenance_hash", + index=0, + number=1, + type=14, + cpp_type=8, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="requested_verify_option", + full_name="google.devtools.cloudbuild.v1.BuildOptions.requested_verify_option", + index=1, + number=2, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="machine_type", + full_name="google.devtools.cloudbuild.v1.BuildOptions.machine_type", + index=2, + number=3, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="disk_size_gb", + full_name="google.devtools.cloudbuild.v1.BuildOptions.disk_size_gb", + index=3, + number=6, + type=3, + cpp_type=2, + label=1, + has_default_value=False, + 
default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="substitution_option", + full_name="google.devtools.cloudbuild.v1.BuildOptions.substitution_option", + index=4, + number=4, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="log_streaming_option", + full_name="google.devtools.cloudbuild.v1.BuildOptions.log_streaming_option", + index=5, + number=5, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="worker_pool", + full_name="google.devtools.cloudbuild.v1.BuildOptions.worker_pool", + index=6, + number=7, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="logging", + full_name="google.devtools.cloudbuild.v1.BuildOptions.logging", + index=7, + number=11, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="env", + full_name="google.devtools.cloudbuild.v1.BuildOptions.env", + index=8, + number=12, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + 
message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="secret_env", + full_name="google.devtools.cloudbuild.v1.BuildOptions.secret_env", + index=9, + number=13, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="volumes", + full_name="google.devtools.cloudbuild.v1.BuildOptions.volumes", + index=10, + number=14, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[ + _BUILDOPTIONS_VERIFYOPTION, + _BUILDOPTIONS_MACHINETYPE, + _BUILDOPTIONS_SUBSTITUTIONOPTION, + _BUILDOPTIONS_LOGSTREAMINGOPTION, + _BUILDOPTIONS_LOGGINGMODE, + ], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=6252, + serialized_end=7218, +) + + +_WORKERPOOL = _descriptor.Descriptor( + name="WorkerPool", + full_name="google.devtools.cloudbuild.v1.WorkerPool", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.devtools.cloudbuild.v1.WorkerPool.name", + index=0, + number=14, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="project_id", + 
full_name="google.devtools.cloudbuild.v1.WorkerPool.project_id", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="service_account_email", + full_name="google.devtools.cloudbuild.v1.WorkerPool.service_account_email", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="worker_count", + full_name="google.devtools.cloudbuild.v1.WorkerPool.worker_count", + index=3, + number=4, + type=3, + cpp_type=2, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="worker_config", + full_name="google.devtools.cloudbuild.v1.WorkerPool.worker_config", + index=4, + number=16, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="regions", + full_name="google.devtools.cloudbuild.v1.WorkerPool.regions", + index=5, + number=9, + type=14, + cpp_type=8, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="create_time", + 
full_name="google.devtools.cloudbuild.v1.WorkerPool.create_time", + index=6, + number=11, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="update_time", + full_name="google.devtools.cloudbuild.v1.WorkerPool.update_time", + index=7, + number=17, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="delete_time", + full_name="google.devtools.cloudbuild.v1.WorkerPool.delete_time", + index=8, + number=12, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="status", + full_name="google.devtools.cloudbuild.v1.WorkerPool.status", + index=9, + number=13, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[_WORKERPOOL_REGION, _WORKERPOOL_STATUS], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=7221, + serialized_end=7849, +) + + +_WORKERCONFIG = _descriptor.Descriptor( + name="WorkerConfig", + full_name="google.devtools.cloudbuild.v1.WorkerConfig", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="machine_type", + 
full_name="google.devtools.cloudbuild.v1.WorkerConfig.machine_type", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="disk_size_gb", + full_name="google.devtools.cloudbuild.v1.WorkerConfig.disk_size_gb", + index=1, + number=2, + type=3, + cpp_type=2, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="network", + full_name="google.devtools.cloudbuild.v1.WorkerConfig.network", + index=2, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="tag", + full_name="google.devtools.cloudbuild.v1.WorkerConfig.tag", + index=3, + number=4, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=7852, + serialized_end=7980, +) + + +_NETWORK = _descriptor.Descriptor( + name="Network", + full_name="google.devtools.cloudbuild.v1.Network", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="project_id", + 
full_name="google.devtools.cloudbuild.v1.Network.project_id", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="network", + full_name="google.devtools.cloudbuild.v1.Network.network", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="subnetwork", + full_name="google.devtools.cloudbuild.v1.Network.subnetwork", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=7982, + serialized_end=8048, +) + + +_CREATEWORKERPOOLREQUEST = _descriptor.Descriptor( + name="CreateWorkerPoolRequest", + full_name="google.devtools.cloudbuild.v1.CreateWorkerPoolRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="parent", + full_name="google.devtools.cloudbuild.v1.CreateWorkerPoolRequest.parent", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + 
_descriptor.FieldDescriptor( + name="worker_pool", + full_name="google.devtools.cloudbuild.v1.CreateWorkerPoolRequest.worker_pool", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=8050, + serialized_end=8155, +) + + +_GETWORKERPOOLREQUEST = _descriptor.Descriptor( + name="GetWorkerPoolRequest", + full_name="google.devtools.cloudbuild.v1.GetWorkerPoolRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.devtools.cloudbuild.v1.GetWorkerPoolRequest.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=8157, + serialized_end=8193, +) + + +_DELETEWORKERPOOLREQUEST = _descriptor.Descriptor( + name="DeleteWorkerPoolRequest", + full_name="google.devtools.cloudbuild.v1.DeleteWorkerPoolRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.devtools.cloudbuild.v1.DeleteWorkerPoolRequest.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + 
is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=8195, + serialized_end=8234, +) + + +_UPDATEWORKERPOOLREQUEST = _descriptor.Descriptor( + name="UpdateWorkerPoolRequest", + full_name="google.devtools.cloudbuild.v1.UpdateWorkerPoolRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.devtools.cloudbuild.v1.UpdateWorkerPoolRequest.name", + index=0, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="worker_pool", + full_name="google.devtools.cloudbuild.v1.UpdateWorkerPoolRequest.worker_pool", + index=1, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=8236, + serialized_end=8339, +) + + +_LISTWORKERPOOLSREQUEST = _descriptor.Descriptor( + name="ListWorkerPoolsRequest", + full_name="google.devtools.cloudbuild.v1.ListWorkerPoolsRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="parent", + full_name="google.devtools.cloudbuild.v1.ListWorkerPoolsRequest.parent", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + 
default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=8341, + serialized_end=8381, +) + + +_LISTWORKERPOOLSRESPONSE = _descriptor.Descriptor( + name="ListWorkerPoolsResponse", + full_name="google.devtools.cloudbuild.v1.ListWorkerPoolsResponse", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="worker_pools", + full_name="google.devtools.cloudbuild.v1.ListWorkerPoolsResponse.worker_pools", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=8383, + serialized_end=8473, +) + +_RUNBUILDTRIGGERREQUEST.fields_by_name["source"].message_type = _REPOSOURCE +_REPOSOURCE.oneofs_by_name["revision"].fields.append( + _REPOSOURCE.fields_by_name["branch_name"] +) +_REPOSOURCE.fields_by_name["branch_name"].containing_oneof = _REPOSOURCE.oneofs_by_name[ + "revision" +] +_REPOSOURCE.oneofs_by_name["revision"].fields.append( + _REPOSOURCE.fields_by_name["tag_name"] +) +_REPOSOURCE.fields_by_name["tag_name"].containing_oneof = _REPOSOURCE.oneofs_by_name[ + "revision" +] +_REPOSOURCE.oneofs_by_name["revision"].fields.append( + _REPOSOURCE.fields_by_name["commit_sha"] +) +_REPOSOURCE.fields_by_name["commit_sha"].containing_oneof = _REPOSOURCE.oneofs_by_name[ + "revision" +] 
+_SOURCE.fields_by_name["storage_source"].message_type = _STORAGESOURCE +_SOURCE.fields_by_name["repo_source"].message_type = _REPOSOURCE +_SOURCE.oneofs_by_name["source"].fields.append(_SOURCE.fields_by_name["storage_source"]) +_SOURCE.fields_by_name["storage_source"].containing_oneof = _SOURCE.oneofs_by_name[ + "source" +] +_SOURCE.oneofs_by_name["source"].fields.append(_SOURCE.fields_by_name["repo_source"]) +_SOURCE.fields_by_name["repo_source"].containing_oneof = _SOURCE.oneofs_by_name[ + "source" +] +_BUILTIMAGE.fields_by_name["push_timing"].message_type = _TIMESPAN +_BUILDSTEP.fields_by_name["volumes"].message_type = _VOLUME +_BUILDSTEP.fields_by_name["timing"].message_type = _TIMESPAN +_BUILDSTEP.fields_by_name["pull_timing"].message_type = _TIMESPAN +_BUILDSTEP.fields_by_name[ + "timeout" +].message_type = google_dot_protobuf_dot_duration__pb2._DURATION +_BUILDSTEP.fields_by_name["status"].enum_type = _BUILD_STATUS +_RESULTS.fields_by_name["images"].message_type = _BUILTIMAGE +_RESULTS.fields_by_name["artifact_timing"].message_type = _TIMESPAN +_ARTIFACTRESULT.fields_by_name["file_hash"].message_type = _FILEHASHES +_BUILD_SUBSTITUTIONSENTRY.containing_type = _BUILD +_BUILD_TIMINGENTRY.fields_by_name["value"].message_type = _TIMESPAN +_BUILD_TIMINGENTRY.containing_type = _BUILD +_BUILD.fields_by_name["status"].enum_type = _BUILD_STATUS +_BUILD.fields_by_name["source"].message_type = _SOURCE +_BUILD.fields_by_name["steps"].message_type = _BUILDSTEP +_BUILD.fields_by_name["results"].message_type = _RESULTS +_BUILD.fields_by_name[ + "create_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_BUILD.fields_by_name[ + "start_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_BUILD.fields_by_name[ + "finish_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_BUILD.fields_by_name[ + "timeout" +].message_type = google_dot_protobuf_dot_duration__pb2._DURATION 
+_BUILD.fields_by_name["artifacts"].message_type = _ARTIFACTS +_BUILD.fields_by_name["source_provenance"].message_type = _SOURCEPROVENANCE +_BUILD.fields_by_name["options"].message_type = _BUILDOPTIONS +_BUILD.fields_by_name["substitutions"].message_type = _BUILD_SUBSTITUTIONSENTRY +_BUILD.fields_by_name["secrets"].message_type = _SECRET +_BUILD.fields_by_name["timing"].message_type = _BUILD_TIMINGENTRY +_BUILD_STATUS.containing_type = _BUILD +_ARTIFACTS_ARTIFACTOBJECTS.fields_by_name["timing"].message_type = _TIMESPAN +_ARTIFACTS_ARTIFACTOBJECTS.containing_type = _ARTIFACTS +_ARTIFACTS.fields_by_name["objects"].message_type = _ARTIFACTS_ARTIFACTOBJECTS +_TIMESPAN.fields_by_name[ + "start_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_TIMESPAN.fields_by_name[ + "end_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_BUILDOPERATIONMETADATA.fields_by_name["build"].message_type = _BUILD +_SOURCEPROVENANCE_FILEHASHESENTRY.fields_by_name["value"].message_type = _FILEHASHES +_SOURCEPROVENANCE_FILEHASHESENTRY.containing_type = _SOURCEPROVENANCE +_SOURCEPROVENANCE.fields_by_name[ + "resolved_storage_source" +].message_type = _STORAGESOURCE +_SOURCEPROVENANCE.fields_by_name["resolved_repo_source"].message_type = _REPOSOURCE +_SOURCEPROVENANCE.fields_by_name[ + "file_hashes" +].message_type = _SOURCEPROVENANCE_FILEHASHESENTRY +_FILEHASHES.fields_by_name["file_hash"].message_type = _HASH +_HASH.fields_by_name["type"].enum_type = _HASH_HASHTYPE +_HASH_HASHTYPE.containing_type = _HASH +_SECRET_SECRETENVENTRY.containing_type = _SECRET +_SECRET.fields_by_name["secret_env"].message_type = _SECRET_SECRETENVENTRY +_CREATEBUILDREQUEST.fields_by_name["build"].message_type = _BUILD +_LISTBUILDSRESPONSE.fields_by_name["builds"].message_type = _BUILD +_BUILDTRIGGER_SUBSTITUTIONSENTRY.containing_type = _BUILDTRIGGER +_BUILDTRIGGER.fields_by_name["trigger_template"].message_type = _REPOSOURCE 
+_BUILDTRIGGER.fields_by_name["github"].message_type = _GITHUBEVENTSCONFIG +_BUILDTRIGGER.fields_by_name["build"].message_type = _BUILD +_BUILDTRIGGER.fields_by_name[ + "create_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_BUILDTRIGGER.fields_by_name[ + "substitutions" +].message_type = _BUILDTRIGGER_SUBSTITUTIONSENTRY +_BUILDTRIGGER.oneofs_by_name["build_template"].fields.append( + _BUILDTRIGGER.fields_by_name["build"] +) +_BUILDTRIGGER.fields_by_name["build"].containing_oneof = _BUILDTRIGGER.oneofs_by_name[ + "build_template" +] +_BUILDTRIGGER.oneofs_by_name["build_template"].fields.append( + _BUILDTRIGGER.fields_by_name["filename"] +) +_BUILDTRIGGER.fields_by_name[ + "filename" +].containing_oneof = _BUILDTRIGGER.oneofs_by_name["build_template"] +_GITHUBEVENTSCONFIG.fields_by_name["pull_request"].message_type = _PULLREQUESTFILTER +_GITHUBEVENTSCONFIG.fields_by_name["push"].message_type = _PUSHFILTER +_GITHUBEVENTSCONFIG.oneofs_by_name["event"].fields.append( + _GITHUBEVENTSCONFIG.fields_by_name["pull_request"] +) +_GITHUBEVENTSCONFIG.fields_by_name[ + "pull_request" +].containing_oneof = _GITHUBEVENTSCONFIG.oneofs_by_name["event"] +_GITHUBEVENTSCONFIG.oneofs_by_name["event"].fields.append( + _GITHUBEVENTSCONFIG.fields_by_name["push"] +) +_GITHUBEVENTSCONFIG.fields_by_name[ + "push" +].containing_oneof = _GITHUBEVENTSCONFIG.oneofs_by_name["event"] +_PULLREQUESTFILTER.fields_by_name[ + "comment_control" +].enum_type = _PULLREQUESTFILTER_COMMENTCONTROL +_PULLREQUESTFILTER_COMMENTCONTROL.containing_type = _PULLREQUESTFILTER +_PULLREQUESTFILTER.oneofs_by_name["git_ref"].fields.append( + _PULLREQUESTFILTER.fields_by_name["branch"] +) +_PULLREQUESTFILTER.fields_by_name[ + "branch" +].containing_oneof = _PULLREQUESTFILTER.oneofs_by_name["git_ref"] +_PUSHFILTER.oneofs_by_name["git_ref"].fields.append( + _PUSHFILTER.fields_by_name["branch"] +) +_PUSHFILTER.fields_by_name["branch"].containing_oneof = _PUSHFILTER.oneofs_by_name[ + "git_ref" +] 
+_PUSHFILTER.oneofs_by_name["git_ref"].fields.append(_PUSHFILTER.fields_by_name["tag"]) +_PUSHFILTER.fields_by_name["tag"].containing_oneof = _PUSHFILTER.oneofs_by_name[ + "git_ref" +] +_CREATEBUILDTRIGGERREQUEST.fields_by_name["trigger"].message_type = _BUILDTRIGGER +_LISTBUILDTRIGGERSRESPONSE.fields_by_name["triggers"].message_type = _BUILDTRIGGER +_UPDATEBUILDTRIGGERREQUEST.fields_by_name["trigger"].message_type = _BUILDTRIGGER +_BUILDOPTIONS.fields_by_name["source_provenance_hash"].enum_type = _HASH_HASHTYPE +_BUILDOPTIONS.fields_by_name[ + "requested_verify_option" +].enum_type = _BUILDOPTIONS_VERIFYOPTION +_BUILDOPTIONS.fields_by_name["machine_type"].enum_type = _BUILDOPTIONS_MACHINETYPE +_BUILDOPTIONS.fields_by_name[ + "substitution_option" +].enum_type = _BUILDOPTIONS_SUBSTITUTIONOPTION +_BUILDOPTIONS.fields_by_name[ + "log_streaming_option" +].enum_type = _BUILDOPTIONS_LOGSTREAMINGOPTION +_BUILDOPTIONS.fields_by_name["logging"].enum_type = _BUILDOPTIONS_LOGGINGMODE +_BUILDOPTIONS.fields_by_name["volumes"].message_type = _VOLUME +_BUILDOPTIONS_VERIFYOPTION.containing_type = _BUILDOPTIONS +_BUILDOPTIONS_MACHINETYPE.containing_type = _BUILDOPTIONS +_BUILDOPTIONS_SUBSTITUTIONOPTION.containing_type = _BUILDOPTIONS +_BUILDOPTIONS_LOGSTREAMINGOPTION.containing_type = _BUILDOPTIONS +_BUILDOPTIONS_LOGGINGMODE.containing_type = _BUILDOPTIONS +_WORKERPOOL.fields_by_name["worker_config"].message_type = _WORKERCONFIG +_WORKERPOOL.fields_by_name["regions"].enum_type = _WORKERPOOL_REGION +_WORKERPOOL.fields_by_name[ + "create_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_WORKERPOOL.fields_by_name[ + "update_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_WORKERPOOL.fields_by_name[ + "delete_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_WORKERPOOL.fields_by_name["status"].enum_type = _WORKERPOOL_STATUS +_WORKERPOOL_REGION.containing_type = _WORKERPOOL 
+_WORKERPOOL_STATUS.containing_type = _WORKERPOOL +_WORKERCONFIG.fields_by_name["network"].message_type = _NETWORK +_CREATEWORKERPOOLREQUEST.fields_by_name["worker_pool"].message_type = _WORKERPOOL +_UPDATEWORKERPOOLREQUEST.fields_by_name["worker_pool"].message_type = _WORKERPOOL +_LISTWORKERPOOLSRESPONSE.fields_by_name["worker_pools"].message_type = _WORKERPOOL +DESCRIPTOR.message_types_by_name["RetryBuildRequest"] = _RETRYBUILDREQUEST +DESCRIPTOR.message_types_by_name["RunBuildTriggerRequest"] = _RUNBUILDTRIGGERREQUEST +DESCRIPTOR.message_types_by_name["StorageSource"] = _STORAGESOURCE +DESCRIPTOR.message_types_by_name["RepoSource"] = _REPOSOURCE +DESCRIPTOR.message_types_by_name["Source"] = _SOURCE +DESCRIPTOR.message_types_by_name["BuiltImage"] = _BUILTIMAGE +DESCRIPTOR.message_types_by_name["BuildStep"] = _BUILDSTEP +DESCRIPTOR.message_types_by_name["Volume"] = _VOLUME +DESCRIPTOR.message_types_by_name["Results"] = _RESULTS +DESCRIPTOR.message_types_by_name["ArtifactResult"] = _ARTIFACTRESULT +DESCRIPTOR.message_types_by_name["Build"] = _BUILD +DESCRIPTOR.message_types_by_name["Artifacts"] = _ARTIFACTS +DESCRIPTOR.message_types_by_name["TimeSpan"] = _TIMESPAN +DESCRIPTOR.message_types_by_name["BuildOperationMetadata"] = _BUILDOPERATIONMETADATA +DESCRIPTOR.message_types_by_name["SourceProvenance"] = _SOURCEPROVENANCE +DESCRIPTOR.message_types_by_name["FileHashes"] = _FILEHASHES +DESCRIPTOR.message_types_by_name["Hash"] = _HASH +DESCRIPTOR.message_types_by_name["Secret"] = _SECRET +DESCRIPTOR.message_types_by_name["CreateBuildRequest"] = _CREATEBUILDREQUEST +DESCRIPTOR.message_types_by_name["GetBuildRequest"] = _GETBUILDREQUEST +DESCRIPTOR.message_types_by_name["ListBuildsRequest"] = _LISTBUILDSREQUEST +DESCRIPTOR.message_types_by_name["ListBuildsResponse"] = _LISTBUILDSRESPONSE +DESCRIPTOR.message_types_by_name["CancelBuildRequest"] = _CANCELBUILDREQUEST +DESCRIPTOR.message_types_by_name["BuildTrigger"] = _BUILDTRIGGER 
+DESCRIPTOR.message_types_by_name["GitHubEventsConfig"] = _GITHUBEVENTSCONFIG +DESCRIPTOR.message_types_by_name["PullRequestFilter"] = _PULLREQUESTFILTER +DESCRIPTOR.message_types_by_name["PushFilter"] = _PUSHFILTER +DESCRIPTOR.message_types_by_name[ + "CreateBuildTriggerRequest" +] = _CREATEBUILDTRIGGERREQUEST +DESCRIPTOR.message_types_by_name["GetBuildTriggerRequest"] = _GETBUILDTRIGGERREQUEST +DESCRIPTOR.message_types_by_name["ListBuildTriggersRequest"] = _LISTBUILDTRIGGERSREQUEST +DESCRIPTOR.message_types_by_name[ + "ListBuildTriggersResponse" +] = _LISTBUILDTRIGGERSRESPONSE +DESCRIPTOR.message_types_by_name[ + "DeleteBuildTriggerRequest" +] = _DELETEBUILDTRIGGERREQUEST +DESCRIPTOR.message_types_by_name[ + "UpdateBuildTriggerRequest" +] = _UPDATEBUILDTRIGGERREQUEST +DESCRIPTOR.message_types_by_name["BuildOptions"] = _BUILDOPTIONS +DESCRIPTOR.message_types_by_name["WorkerPool"] = _WORKERPOOL +DESCRIPTOR.message_types_by_name["WorkerConfig"] = _WORKERCONFIG +DESCRIPTOR.message_types_by_name["Network"] = _NETWORK +DESCRIPTOR.message_types_by_name["CreateWorkerPoolRequest"] = _CREATEWORKERPOOLREQUEST +DESCRIPTOR.message_types_by_name["GetWorkerPoolRequest"] = _GETWORKERPOOLREQUEST +DESCRIPTOR.message_types_by_name["DeleteWorkerPoolRequest"] = _DELETEWORKERPOOLREQUEST +DESCRIPTOR.message_types_by_name["UpdateWorkerPoolRequest"] = _UPDATEWORKERPOOLREQUEST +DESCRIPTOR.message_types_by_name["ListWorkerPoolsRequest"] = _LISTWORKERPOOLSREQUEST +DESCRIPTOR.message_types_by_name["ListWorkerPoolsResponse"] = _LISTWORKERPOOLSRESPONSE +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +RetryBuildRequest = _reflection.GeneratedProtocolMessageType( + "RetryBuildRequest", + (_message.Message,), + dict( + DESCRIPTOR=_RETRYBUILDREQUEST, + __module__="google.devtools.cloudbuild_v1.proto.cloudbuild_pb2", + __doc__="""Specifies a build to retry. + + + Attributes: + project_id: + Required. ID of the project. + id: + Required. Build ID of the original build. 
+ """, + # @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v1.RetryBuildRequest) + ), +) +_sym_db.RegisterMessage(RetryBuildRequest) + +RunBuildTriggerRequest = _reflection.GeneratedProtocolMessageType( + "RunBuildTriggerRequest", + (_message.Message,), + dict( + DESCRIPTOR=_RUNBUILDTRIGGERREQUEST, + __module__="google.devtools.cloudbuild_v1.proto.cloudbuild_pb2", + __doc__="""Specifies a build trigger to run and the source to use. + + + Attributes: + project_id: + Required. ID of the project. + trigger_id: + Required. ID of the trigger. + source: + Required. Source to build against this trigger. + """, + # @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v1.RunBuildTriggerRequest) + ), +) +_sym_db.RegisterMessage(RunBuildTriggerRequest) + +StorageSource = _reflection.GeneratedProtocolMessageType( + "StorageSource", + (_message.Message,), + dict( + DESCRIPTOR=_STORAGESOURCE, + __module__="google.devtools.cloudbuild_v1.proto.cloudbuild_pb2", + __doc__="""Location of the source in an archive file in Google Cloud Storage. + + + Attributes: + bucket: + Google Cloud Storage bucket containing the source (see `Bucket + Name Requirements + `__). + object: + Google Cloud Storage object containing the source. This + object must be a gzipped archive file (``.tar.gz``) containing + source to build. + generation: + Google Cloud Storage generation for the object. If the + generation is omitted, the latest generation will be used. + """, + # @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v1.StorageSource) + ), +) +_sym_db.RegisterMessage(StorageSource) + +RepoSource = _reflection.GeneratedProtocolMessageType( + "RepoSource", + (_message.Message,), + dict( + DESCRIPTOR=_REPOSOURCE, + __module__="google.devtools.cloudbuild_v1.proto.cloudbuild_pb2", + __doc__="""Location of the source in a Google Cloud Source Repository. + + + Attributes: + project_id: + ID of the project that owns the Cloud Source Repository. 
If + omitted, the project ID requesting the build is assumed. + repo_name: + Name of the Cloud Source Repository. If omitted, the name + "default" is assumed. + revision: + A revision within the Cloud Source Repository must be + specified in one of these ways. + branch_name: + Regex matching branches to build. The syntax of the regular + expressions accepted is the syntax accepted by RE2 and + described at https://github.com/google/re2/wiki/Syntax + tag_name: + Regex matching tags to build. The syntax of the regular + expressions accepted is the syntax accepted by RE2 and + described at https://github.com/google/re2/wiki/Syntax + commit_sha: + Explicit commit SHA to build. + dir: + Directory, relative to the source root, in which to run the + build. This must be a relative path. If a step's ``dir`` is + specified and is an absolute path, this value is ignored for + that step's execution. + """, + # @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v1.RepoSource) + ), +) +_sym_db.RegisterMessage(RepoSource) + +Source = _reflection.GeneratedProtocolMessageType( + "Source", + (_message.Message,), + dict( + DESCRIPTOR=_SOURCE, + __module__="google.devtools.cloudbuild_v1.proto.cloudbuild_pb2", + __doc__="""Location of the source in a supported storage service. + + + Attributes: + source: + Location of source. + storage_source: + If provided, get the source from this location in Google Cloud + Storage. + repo_source: + If provided, get the source from this location in a Cloud + Source Repository. + """, + # @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v1.Source) + ), +) +_sym_db.RegisterMessage(Source) + +BuiltImage = _reflection.GeneratedProtocolMessageType( + "BuiltImage", + (_message.Message,), + dict( + DESCRIPTOR=_BUILTIMAGE, + __module__="google.devtools.cloudbuild_v1.proto.cloudbuild_pb2", + __doc__="""An image built by the pipeline. 
+ + + Attributes: + name: + Name used to push the container image to Google Container + Registry, as presented to ``docker push``. + digest: + Docker Registry 2.0 digest. + push_timing: + Output only. Stores timing information for pushing the + specified image. + """, + # @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v1.BuiltImage) + ), +) +_sym_db.RegisterMessage(BuiltImage) + +BuildStep = _reflection.GeneratedProtocolMessageType( + "BuildStep", + (_message.Message,), + dict( + DESCRIPTOR=_BUILDSTEP, + __module__="google.devtools.cloudbuild_v1.proto.cloudbuild_pb2", + __doc__="""A step in the build pipeline. + + + Attributes: + name: + Required. The name of the container image that will run this + particular build step. If the image is available in the + host's Docker daemon's cache, it will be run directly. If not, + the host will attempt to pull the image first, using the + builder service account's credentials if necessary. The + Docker daemon's cache will already have the latest versions of + all of the officially supported build steps + (https://github.com/GoogleCloudPlatform/cloud-builders). The + Docker daemon will also have cached many of the layers for + some popular images, like "ubuntu", "debian", but they will be + refreshed at the time you attempt to use them. If you built + an image in a previous build step, it will be stored in the + host's Docker daemon's cache and is available to use as the + name for a later build step. + env: + A list of environment variable definitions to be used when + running a step. The elements are of the form "KEY=VALUE" for + the environment variable "KEY" being given the value "VALUE". + args: + A list of arguments that will be presented to the step when it + is started. If the image used to run the step's container has + an entrypoint, the ``args`` are used as arguments to that + entrypoint. 
If the image does not define an entrypoint, the + first element in args is used as the entrypoint, and the + remainder will be used as arguments. + dir: + Working directory to use when running this step's container. + If this value is a relative path, it is relative to the + build's working directory. If this value is absolute, it may + be outside the build's working directory, in which case the + contents of the path may not be persisted across build step + executions, unless a ``volume`` for that path is specified. + If the build specifies a ``RepoSource`` with ``dir`` and a + step with a ``dir``, which specifies an absolute path, the + ``RepoSource`` ``dir`` is ignored for the step's execution. + id: + Unique identifier for this build step, used in ``wait_for`` to + reference this build step as a dependency. + wait_for: + The ID(s) of the step(s) that this build step depends on. This + build step will not start until all the build steps in + ``wait_for`` have completed successfully. If ``wait_for`` is + empty, this build step will start when all previous build + steps in the ``Build.Steps`` list have completed successfully. + entrypoint: + Entrypoint to be used instead of the build step image's + default entrypoint. If unset, the image's default entrypoint + is used. + secret_env: + A list of environment variables which are encrypted using a + Cloud Key Management Service crypto key. These values must be + specified in the build's ``Secret``. + volumes: + List of volumes to mount into the build step. Each volume is + created as an empty volume prior to execution of the build + step. Upon completion of the build, volumes and their contents + are discarded. Using a named volume in only one step is not + valid as it is indicative of a build request with an incorrect + configuration. + timing: + Output only. Stores timing information for executing this + build step. + pull_timing: + Output only. 
Stores timing information for pulling this build + step's builder image only. + timeout: + Time limit for executing this build step. If not defined, the + step has no time limit and will be allowed to continue to run + until either it completes or the build itself times out. + status: + Output only. Status of the build step. At this time, build + step status is only updated on build completion; step status + is not updated in real-time as the build progresses. + """, + # @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v1.BuildStep) + ), +) +_sym_db.RegisterMessage(BuildStep) + +Volume = _reflection.GeneratedProtocolMessageType( + "Volume", + (_message.Message,), + dict( + DESCRIPTOR=_VOLUME, + __module__="google.devtools.cloudbuild_v1.proto.cloudbuild_pb2", + __doc__="""Volume describes a Docker container volume which is mounted into build + steps in order to persist files across build step execution. + + + Attributes: + name: + Name of the volume to mount. Volume names must be unique per + build step and must be valid names for Docker volumes. Each + named volume must be used by at least two build steps. + path: + Path at which to mount the volume. Paths must be absolute and + cannot conflict with other volume paths on the same build step + or with certain reserved volume paths. + """, + # @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v1.Volume) + ), +) +_sym_db.RegisterMessage(Volume) + +Results = _reflection.GeneratedProtocolMessageType( + "Results", + (_message.Message,), + dict( + DESCRIPTOR=_RESULTS, + __module__="google.devtools.cloudbuild_v1.proto.cloudbuild_pb2", + __doc__="""Artifacts created by the build pipeline. + + + Attributes: + images: + Container images that were built as a part of the build. + build_step_images: + List of build step digests, in the order corresponding to + build step indices. + artifact_manifest: + Path to the artifact manifest. Only populated when artifacts + are uploaded. 
+ num_artifacts: + Number of artifacts uploaded. Only populated when artifacts + are uploaded. + build_step_outputs: + List of build step outputs, produced by builder images, in the + order corresponding to build step indices. `Cloud Builders + `__ + can produce this output by writing to + ``$BUILDER_OUTPUT/output``. Only the first 4KB of data is + stored. + artifact_timing: + Time to push all non-container artifacts. + """, + # @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v1.Results) + ), +) +_sym_db.RegisterMessage(Results) + +ArtifactResult = _reflection.GeneratedProtocolMessageType( + "ArtifactResult", + (_message.Message,), + dict( + DESCRIPTOR=_ARTIFACTRESULT, + __module__="google.devtools.cloudbuild_v1.proto.cloudbuild_pb2", + __doc__="""An artifact that was uploaded during a build. This is a single record in + the artifact manifest JSON file. + + + Attributes: + location: + The path of an artifact in a Google Cloud Storage bucket, with + the generation number. For example, + ``gs://mybucket/path/to/output.jar#generation``. + file_hash: + The file hash of the artifact. 
+ """, + # @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v1.ArtifactResult) + ), +) +_sym_db.RegisterMessage(ArtifactResult) + +Build = _reflection.GeneratedProtocolMessageType( + "Build", + (_message.Message,), + dict( + SubstitutionsEntry=_reflection.GeneratedProtocolMessageType( + "SubstitutionsEntry", + (_message.Message,), + dict( + DESCRIPTOR=_BUILD_SUBSTITUTIONSENTRY, + __module__="google.devtools.cloudbuild_v1.proto.cloudbuild_pb2" + # @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v1.Build.SubstitutionsEntry) + ), + ), + TimingEntry=_reflection.GeneratedProtocolMessageType( + "TimingEntry", + (_message.Message,), + dict( + DESCRIPTOR=_BUILD_TIMINGENTRY, + __module__="google.devtools.cloudbuild_v1.proto.cloudbuild_pb2" + # @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v1.Build.TimingEntry) + ), + ), + DESCRIPTOR=_BUILD, + __module__="google.devtools.cloudbuild_v1.proto.cloudbuild_pb2", + __doc__="""A build resource in the Cloud Build API. + + At a high level, a ``Build`` describes where to find source code, how to + build it (for example, the builder image to run on the source), and + where to store the built artifacts. + + Fields can include the following variables, which will be expanded when + the build is created: + + - $PROJECT\_ID: the project ID of the build. + - $BUILD\_ID: the autogenerated ID of the build. + - $REPO\_NAME: the source repository name specified by RepoSource. + - $BRANCH\_NAME: the branch name specified by RepoSource. + - $TAG\_NAME: the tag name specified by RepoSource. + - $REVISION\_ID or $COMMIT\_SHA: the commit SHA specified by RepoSource + or resolved from the specified branch or tag. + - $SHORT\_SHA: first 7 characters of $REVISION\_ID or $COMMIT\_SHA. + + + Attributes: + id: + Output only. Unique identifier of the build. + project_id: + Output only. ID of the project. + status: + Output only. Status of the build. + status_detail: + Output only. 
Customer-readable message about the current + status. + source: + The location of the source files to build. + steps: + Required. The operations to be performed on the workspace. + results: + Output only. Results of the build. + create_time: + Output only. Time at which the request to create the build was + received. + start_time: + Output only. Time at which execution of the build was started. + finish_time: + Output only. Time at which execution of the build was + finished. The difference between finish\_time and start\_time + is the duration of the build's execution. + timeout: + Amount of time that this build should be allowed to run, to + second granularity. If this amount of time elapses, work on + the build will cease and the build status will be ``TIMEOUT``. + Default time is ten minutes. + images: + A list of images to be pushed upon the successful completion + of all build steps. The images are pushed using the builder + service account's credentials. The digests of the pushed + images will be stored in the ``Build`` resource's results + field. If any of the images fail to be pushed, the build + status is marked ``FAILURE``. + artifacts: + Artifacts produced by the build that should be uploaded upon + successful completion of all build steps. + logs_bucket: + Google Cloud Storage bucket where logs should be written (see + `Bucket Name Requirements + `__). Logs file names will be of the + format ``${logs_bucket}/log-${build_id}.txt``. + source_provenance: + Output only. A permanent fixed identifier for source. + build_trigger_id: + Output only. The ID of the ``BuildTrigger`` that triggered + this build, if it was triggered automatically. + options: + Special options for this build. + log_url: + Output only. URL to logs for this build in Google Cloud + Console. + substitutions: + Substitutions data for ``Build`` resource. + tags: + Tags for annotation of a ``Build``. These are not docker tags. 
+ secrets: + Secrets to decrypt using Cloud Key Management Service. + timing: + Output only. Stores timing information for phases of the + build. Valid keys are: - BUILD: time to execute all build + steps - PUSH: time to push all specified images. - + FETCHSOURCE: time to fetch source. If the build does not + specify source or images, these keys will not be included. + """, + # @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v1.Build) + ), +) +_sym_db.RegisterMessage(Build) +_sym_db.RegisterMessage(Build.SubstitutionsEntry) +_sym_db.RegisterMessage(Build.TimingEntry) + +Artifacts = _reflection.GeneratedProtocolMessageType( + "Artifacts", + (_message.Message,), + dict( + ArtifactObjects=_reflection.GeneratedProtocolMessageType( + "ArtifactObjects", + (_message.Message,), + dict( + DESCRIPTOR=_ARTIFACTS_ARTIFACTOBJECTS, + __module__="google.devtools.cloudbuild_v1.proto.cloudbuild_pb2", + __doc__="""Files in the workspace to upload to Cloud Storage upon successful + completion of all build steps. + + + Attributes: + location: + Cloud Storage bucket and optional object path, in the form + "gs://bucket/path/to/somewhere/". (see `Bucket Name + Requirements `__). Files in the workspace matching any + path pattern will be uploaded to Cloud Storage with this + location as a prefix. + paths: + Path globs used to match files in the build's workspace. + timing: + Output only. Stores timing information for pushing all + artifact objects. + """, + # @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v1.Artifacts.ArtifactObjects) + ), + ), + DESCRIPTOR=_ARTIFACTS, + __module__="google.devtools.cloudbuild_v1.proto.cloudbuild_pb2", + __doc__="""Artifacts produced by a build that should be uploaded upon successful + completion of all build steps. + + + Attributes: + images: + A list of images to be pushed upon the successful completion + of all build steps. The images will be pushed using the + builder service account's credentials. 
The digests of the + pushed images will be stored in the Build resource's results + field. If any of the images fail to be pushed, the build is + marked FAILURE. + objects: + A list of objects to be uploaded to Cloud Storage upon + successful completion of all build steps. Files in the + workspace matching specified paths globs will be uploaded to + the specified Cloud Storage location using the builder service + account's credentials. The location and generation of the + uploaded objects will be stored in the Build resource's + results field. If any objects fail to be pushed, the build is + marked FAILURE. + """, + # @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v1.Artifacts) + ), +) +_sym_db.RegisterMessage(Artifacts) +_sym_db.RegisterMessage(Artifacts.ArtifactObjects) + +TimeSpan = _reflection.GeneratedProtocolMessageType( + "TimeSpan", + (_message.Message,), + dict( + DESCRIPTOR=_TIMESPAN, + __module__="google.devtools.cloudbuild_v1.proto.cloudbuild_pb2", + __doc__="""Start and end times for a build execution phase. + + + Attributes: + start_time: + Start of time span. + end_time: + End of time span. + """, + # @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v1.TimeSpan) + ), +) +_sym_db.RegisterMessage(TimeSpan) + +BuildOperationMetadata = _reflection.GeneratedProtocolMessageType( + "BuildOperationMetadata", + (_message.Message,), + dict( + DESCRIPTOR=_BUILDOPERATIONMETADATA, + __module__="google.devtools.cloudbuild_v1.proto.cloudbuild_pb2", + __doc__="""Metadata for build operations. + + + Attributes: + build: + The build that the operation is tracking. 
+ """, + # @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v1.BuildOperationMetadata) + ), +) +_sym_db.RegisterMessage(BuildOperationMetadata) + +SourceProvenance = _reflection.GeneratedProtocolMessageType( + "SourceProvenance", + (_message.Message,), + dict( + FileHashesEntry=_reflection.GeneratedProtocolMessageType( + "FileHashesEntry", + (_message.Message,), + dict( + DESCRIPTOR=_SOURCEPROVENANCE_FILEHASHESENTRY, + __module__="google.devtools.cloudbuild_v1.proto.cloudbuild_pb2" + # @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v1.SourceProvenance.FileHashesEntry) + ), + ), + DESCRIPTOR=_SOURCEPROVENANCE, + __module__="google.devtools.cloudbuild_v1.proto.cloudbuild_pb2", + __doc__="""Provenance of the source. Ways to find the original source, or verify + that some source was used for this build. + + + Attributes: + resolved_storage_source: + A copy of the build's ``source.storage_source``, if exists, + with any generations resolved. + resolved_repo_source: + A copy of the build's ``source.repo_source``, if exists, with + any revisions resolved. + file_hashes: + Output only. Hash(es) of the build source, which can be used + to verify that the original source integrity was maintained in + the build. Note that ``FileHashes`` will only be populated if + ``BuildOptions`` has requested a ``SourceProvenanceHash``. + The keys to this map are file paths used as build source and + the values contain the hash values for those files. If the + build source came in a single package such as a gzipped + tarfile (``.tar.gz``), the ``FileHash`` will be for the single + path to that file. 
+ """, + # @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v1.SourceProvenance) + ), +) +_sym_db.RegisterMessage(SourceProvenance) +_sym_db.RegisterMessage(SourceProvenance.FileHashesEntry) + +FileHashes = _reflection.GeneratedProtocolMessageType( + "FileHashes", + (_message.Message,), + dict( + DESCRIPTOR=_FILEHASHES, + __module__="google.devtools.cloudbuild_v1.proto.cloudbuild_pb2", + __doc__="""Container message for hashes of byte content of files, used in + SourceProvenance messages to verify integrity of source input to the + build. + + + Attributes: + file_hash: + Collection of file hashes. + """, + # @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v1.FileHashes) + ), +) +_sym_db.RegisterMessage(FileHashes) + +Hash = _reflection.GeneratedProtocolMessageType( + "Hash", + (_message.Message,), + dict( + DESCRIPTOR=_HASH, + __module__="google.devtools.cloudbuild_v1.proto.cloudbuild_pb2", + __doc__="""Container message for hash values. + + + Attributes: + type: + The type of hash that was performed. + value: + The hash value. + """, + # @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v1.Hash) + ), +) +_sym_db.RegisterMessage(Hash) + +Secret = _reflection.GeneratedProtocolMessageType( + "Secret", + (_message.Message,), + dict( + SecretEnvEntry=_reflection.GeneratedProtocolMessageType( + "SecretEnvEntry", + (_message.Message,), + dict( + DESCRIPTOR=_SECRET_SECRETENVENTRY, + __module__="google.devtools.cloudbuild_v1.proto.cloudbuild_pb2" + # @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v1.Secret.SecretEnvEntry) + ), + ), + DESCRIPTOR=_SECRET, + __module__="google.devtools.cloudbuild_v1.proto.cloudbuild_pb2", + __doc__="""Pairs a set of secret environment variables containing encrypted values + with the Cloud KMS key to use to decrypt the value. + + + Attributes: + kms_key_name: + Cloud KMS key name to use to decrypt these envs. + secret_env: + Map of environment variable name to its encrypted value. 
+ Secret environment variables must be unique across all of a + build's secrets, and must be used by at least one build step. + Values can be at most 64 KB in size. There can be at most 100 + secret values across all of a build's secrets. + """, + # @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v1.Secret) + ), +) +_sym_db.RegisterMessage(Secret) +_sym_db.RegisterMessage(Secret.SecretEnvEntry) + +CreateBuildRequest = _reflection.GeneratedProtocolMessageType( + "CreateBuildRequest", + (_message.Message,), + dict( + DESCRIPTOR=_CREATEBUILDREQUEST, + __module__="google.devtools.cloudbuild_v1.proto.cloudbuild_pb2", + __doc__="""Request to create a new build. + + + Attributes: + project_id: + Required. ID of the project. + build: + Required. Build resource to create. + """, + # @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v1.CreateBuildRequest) + ), +) +_sym_db.RegisterMessage(CreateBuildRequest) + +GetBuildRequest = _reflection.GeneratedProtocolMessageType( + "GetBuildRequest", + (_message.Message,), + dict( + DESCRIPTOR=_GETBUILDREQUEST, + __module__="google.devtools.cloudbuild_v1.proto.cloudbuild_pb2", + __doc__="""Request to get a build. + + + Attributes: + project_id: + Required. ID of the project. + id: + Required. ID of the build. + """, + # @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v1.GetBuildRequest) + ), +) +_sym_db.RegisterMessage(GetBuildRequest) + +ListBuildsRequest = _reflection.GeneratedProtocolMessageType( + "ListBuildsRequest", + (_message.Message,), + dict( + DESCRIPTOR=_LISTBUILDSREQUEST, + __module__="google.devtools.cloudbuild_v1.proto.cloudbuild_pb2", + __doc__="""Request to list builds. + + + Attributes: + project_id: + Required. ID of the project. + page_size: + Number of results to return in the list. + page_token: + Token to provide to skip to a particular spot in the list. + filter: + The raw filter text to constrain the results. 
+ """, + # @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v1.ListBuildsRequest) + ), +) +_sym_db.RegisterMessage(ListBuildsRequest) + +ListBuildsResponse = _reflection.GeneratedProtocolMessageType( + "ListBuildsResponse", + (_message.Message,), + dict( + DESCRIPTOR=_LISTBUILDSRESPONSE, + __module__="google.devtools.cloudbuild_v1.proto.cloudbuild_pb2", + __doc__="""Response including listed builds. + + + Attributes: + builds: + Builds will be sorted by ``create_time``, descending. + next_page_token: + Token to receive the next page of results. + """, + # @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v1.ListBuildsResponse) + ), +) +_sym_db.RegisterMessage(ListBuildsResponse) + +CancelBuildRequest = _reflection.GeneratedProtocolMessageType( + "CancelBuildRequest", + (_message.Message,), + dict( + DESCRIPTOR=_CANCELBUILDREQUEST, + __module__="google.devtools.cloudbuild_v1.proto.cloudbuild_pb2", + __doc__="""Request to cancel an ongoing build. + + + Attributes: + project_id: + Required. ID of the project. + id: + Required. ID of the build. + """, + # @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v1.CancelBuildRequest) + ), +) +_sym_db.RegisterMessage(CancelBuildRequest) + +BuildTrigger = _reflection.GeneratedProtocolMessageType( + "BuildTrigger", + (_message.Message,), + dict( + SubstitutionsEntry=_reflection.GeneratedProtocolMessageType( + "SubstitutionsEntry", + (_message.Message,), + dict( + DESCRIPTOR=_BUILDTRIGGER_SUBSTITUTIONSENTRY, + __module__="google.devtools.cloudbuild_v1.proto.cloudbuild_pb2" + # @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v1.BuildTrigger.SubstitutionsEntry) + ), + ), + DESCRIPTOR=_BUILDTRIGGER, + __module__="google.devtools.cloudbuild_v1.proto.cloudbuild_pb2", + __doc__="""Configuration for an automated build in response to source repository + changes. + + + Attributes: + id: + Output only. Unique identifier of the trigger. 
+ description: + Human-readable description of this trigger. + name: + User assigned name of the trigger. Must be unique within the + project. + tags: + Tags for annotation of a ``BuildTrigger`` + trigger_template: + Template describing the types of source changes to trigger a + build. Branch and tag names in trigger templates are + interpreted as regular expressions. Any branch or tag change + that matches that regular expression will trigger a build. + Mutually exclusive with ``github``. + github: + GitHubEventsConfig describes the configuration of a trigger + that creates a build whenever a GitHub event is received. + Mutually exclusive with ``trigger_template``. + build_template: + Template describing the Build request to make when the trigger + is matched. + build: + Contents of the build template. + filename: + Path, from the source root, to a file whose contents is used + for the template. + create_time: + Output only. Time when the trigger was created. + disabled: + If true, the trigger will never result in a build. + substitutions: + Substitutions data for Build resource. + ignored_files: + ignored\_files and included\_files are file glob matches using + http://godoc/pkg/path/filepath#Match extended with support for + "\*\*". If ignored\_files and changed files are both empty, + then they are not used to determine whether or not to trigger + a build. If ignored\_files is not empty, then we ignore any + files that match any of the ignored\_file globs. If the change + has no files that are outside of the ignored\_files globs, + then we do not trigger a build. + included_files: + If any of the files altered in the commit pass the + ignored\_files filter and included\_files is empty, then as + far as this filter is concerned, we should trigger the build. + If any of the files altered in the commit pass the + ignored\_files filter and included\_files is not empty, then + we make sure that at least one of those files matches a + included\_files glob. 
If not, then we do not trigger a build. + """, + # @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v1.BuildTrigger) + ), +) +_sym_db.RegisterMessage(BuildTrigger) +_sym_db.RegisterMessage(BuildTrigger.SubstitutionsEntry) + +GitHubEventsConfig = _reflection.GeneratedProtocolMessageType( + "GitHubEventsConfig", + (_message.Message,), + dict( + DESCRIPTOR=_GITHUBEVENTSCONFIG, + __module__="google.devtools.cloudbuild_v1.proto.cloudbuild_pb2", + __doc__="""GitHubEventsConfig describes the configuration of a trigger that creates + a build whenever a GitHub event is received. + + This message is experimental. + + + Attributes: + installation_id: + The installationID that emits the GitHub event. + owner: + Owner of the repository. For example: The owner for + https://github.com/googlecloudplatform/cloud-builders is + "googlecloudplatform". + name: + Name of the repository. For example: The name for + https://github.com/googlecloudplatform/cloud-builders is + "cloud-builders". + event: + Filter describing the types of events to trigger a build. + Currently supported event types: push, pull\_request. + pull_request: + filter to match changes in pull requests. + push: + filter to match changes in refs like branches, tags. + """, + # @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v1.GitHubEventsConfig) + ), +) +_sym_db.RegisterMessage(GitHubEventsConfig) + +PullRequestFilter = _reflection.GeneratedProtocolMessageType( + "PullRequestFilter", + (_message.Message,), + dict( + DESCRIPTOR=_PULLREQUESTFILTER, + __module__="google.devtools.cloudbuild_v1.proto.cloudbuild_pb2", + __doc__="""PullRequestFilter contains filter properties for matching GitHub Pull + Requests. + + + Attributes: + git_ref: + Target refs to match. A target ref is the git reference where + the pull request will be applied. + branch: + Regex of branches to match. 
The syntax of the regular + expressions accepted is the syntax accepted by RE2 and + described at https://github.com/google/re2/wiki/Syntax + comment_control: + Whether to block builds on a "/gcbrun" comment from a + repository owner or collaborator. + """, + # @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v1.PullRequestFilter) + ), +) +_sym_db.RegisterMessage(PullRequestFilter) + +PushFilter = _reflection.GeneratedProtocolMessageType( + "PushFilter", + (_message.Message,), + dict( + DESCRIPTOR=_PUSHFILTER, + __module__="google.devtools.cloudbuild_v1.proto.cloudbuild_pb2", + __doc__="""Push contains filter properties for matching GitHub git pushes. + + + Attributes: + git_ref: + Modified refs to match. A modified refs are the refs modified + by a git push operation. + branch: + Regexes matching branches to build. The syntax of the regular + expressions accepted is the syntax accepted by RE2 and + described at https://github.com/google/re2/wiki/Syntax + tag: + Regexes matching tags to build. The syntax of the regular + expressions accepted is the syntax accepted by RE2 and + described at https://github.com/google/re2/wiki/Syntax + """, + # @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v1.PushFilter) + ), +) +_sym_db.RegisterMessage(PushFilter) + +CreateBuildTriggerRequest = _reflection.GeneratedProtocolMessageType( + "CreateBuildTriggerRequest", + (_message.Message,), + dict( + DESCRIPTOR=_CREATEBUILDTRIGGERREQUEST, + __module__="google.devtools.cloudbuild_v1.proto.cloudbuild_pb2", + __doc__="""Request to create a new ``BuildTrigger``. + + + Attributes: + project_id: + Required. ID of the project for which to configure automatic + builds. + trigger: + Required. ``BuildTrigger`` to create. 
+ """, + # @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v1.CreateBuildTriggerRequest) + ), +) +_sym_db.RegisterMessage(CreateBuildTriggerRequest) + +GetBuildTriggerRequest = _reflection.GeneratedProtocolMessageType( + "GetBuildTriggerRequest", + (_message.Message,), + dict( + DESCRIPTOR=_GETBUILDTRIGGERREQUEST, + __module__="google.devtools.cloudbuild_v1.proto.cloudbuild_pb2", + __doc__="""Returns the ``BuildTrigger`` with the specified ID. + + + Attributes: + project_id: + Required. ID of the project that owns the trigger. + trigger_id: + Required. ID of the ``BuildTrigger`` to get. + """, + # @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v1.GetBuildTriggerRequest) + ), +) +_sym_db.RegisterMessage(GetBuildTriggerRequest) + +ListBuildTriggersRequest = _reflection.GeneratedProtocolMessageType( + "ListBuildTriggersRequest", + (_message.Message,), + dict( + DESCRIPTOR=_LISTBUILDTRIGGERSREQUEST, + __module__="google.devtools.cloudbuild_v1.proto.cloudbuild_pb2", + __doc__="""Request to list existing ``BuildTriggers``. + + + Attributes: + project_id: + Required. ID of the project for which to list BuildTriggers. + page_size: + Number of results to return in the list. + page_token: + Token to provide to skip to a particular spot in the list. + """, + # @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v1.ListBuildTriggersRequest) + ), +) +_sym_db.RegisterMessage(ListBuildTriggersRequest) + +ListBuildTriggersResponse = _reflection.GeneratedProtocolMessageType( + "ListBuildTriggersResponse", + (_message.Message,), + dict( + DESCRIPTOR=_LISTBUILDTRIGGERSRESPONSE, + __module__="google.devtools.cloudbuild_v1.proto.cloudbuild_pb2", + __doc__="""Response containing existing ``BuildTriggers``. + + + Attributes: + triggers: + ``BuildTriggers`` for the project, sorted by ``create_time`` + descending. + next_page_token: + Token to receive the next page of results. 
+ """, + # @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v1.ListBuildTriggersResponse) + ), +) +_sym_db.RegisterMessage(ListBuildTriggersResponse) + +DeleteBuildTriggerRequest = _reflection.GeneratedProtocolMessageType( + "DeleteBuildTriggerRequest", + (_message.Message,), + dict( + DESCRIPTOR=_DELETEBUILDTRIGGERREQUEST, + __module__="google.devtools.cloudbuild_v1.proto.cloudbuild_pb2", + __doc__="""Request to delete a ``BuildTrigger``. + + + Attributes: + project_id: + Required. ID of the project that owns the trigger. + trigger_id: + Required. ID of the ``BuildTrigger`` to delete. + """, + # @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v1.DeleteBuildTriggerRequest) + ), +) +_sym_db.RegisterMessage(DeleteBuildTriggerRequest) + +UpdateBuildTriggerRequest = _reflection.GeneratedProtocolMessageType( + "UpdateBuildTriggerRequest", + (_message.Message,), + dict( + DESCRIPTOR=_UPDATEBUILDTRIGGERREQUEST, + __module__="google.devtools.cloudbuild_v1.proto.cloudbuild_pb2", + __doc__="""Request to update an existing ``BuildTrigger``. + + + Attributes: + project_id: + Required. ID of the project that owns the trigger. + trigger_id: + Required. ID of the ``BuildTrigger`` to update. + trigger: + Required. ``BuildTrigger`` to update. + """, + # @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v1.UpdateBuildTriggerRequest) + ), +) +_sym_db.RegisterMessage(UpdateBuildTriggerRequest) + +BuildOptions = _reflection.GeneratedProtocolMessageType( + "BuildOptions", + (_message.Message,), + dict( + DESCRIPTOR=_BUILDOPTIONS, + __module__="google.devtools.cloudbuild_v1.proto.cloudbuild_pb2", + __doc__="""Optional arguments to enable specific features of builds. + + + Attributes: + source_provenance_hash: + Requested hash for SourceProvenance. + requested_verify_option: + Requested verifiability options. + machine_type: + Compute Engine machine type on which to run the build. 
+ disk_size_gb: + Requested disk size for the VM that runs the build. Note that + this is *NOT* "disk free"; some of the space will be used by + the operating system and build utilities. Also note that this + is the minimum disk size that will be allocated for the build + -- the build may run with a larger disk than requested. At + present, the maximum disk size is 1000GB; builds that request + more than the maximum are rejected with an error. + substitution_option: + Option to specify behavior when there is an error in the + substitution checks. + log_streaming_option: + Option to define build log streaming behavior to Google Cloud + Storage. + worker_pool: + Option to specify a ``WorkerPool`` for the build. User + specifies the pool with the format + "[WORKERPOOL\_PROJECT\_ID]/[WORKERPOOL\_NAME]". This is an + experimental field. + logging: + Option to specify the logging mode, which determines where the + logs are stored. + env: + A list of global environment variable definitions that will + exist for all build steps in this build. If a variable is + defined in both globally and in a build step, the variable + will use the build step value. The elements are of the form + "KEY=VALUE" for the environment variable "KEY" being given the + value "VALUE". + secret_env: + A list of global environment variables, which are encrypted + using a Cloud Key Management Service crypto key. These values + must be specified in the build's ``Secret``. These variables + will be available to all build steps in this build. + volumes: + Global list of volumes to mount for ALL build steps Each + volume is created as an empty volume prior to starting the + build process. Upon completion of the build, volumes and their + contents are discarded. Global volume names and paths cannot + conflict with the volumes defined a build step. Using a + global volume in a build with only one step is not valid as it + is indicative of a build request with an incorrect + configuration. 
+ """, + # @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v1.BuildOptions) + ), +) +_sym_db.RegisterMessage(BuildOptions) + +WorkerPool = _reflection.GeneratedProtocolMessageType( + "WorkerPool", + (_message.Message,), + dict( + DESCRIPTOR=_WORKERPOOL, + __module__="google.devtools.cloudbuild_v1.proto.cloudbuild_pb2", + __doc__="""Configuration for a WorkerPool to run the builds. + + Workers are machines that Cloud Build uses to run your builds. By + default, all workers run in a project owned by Cloud Build. To have full + control over the workers that execute your builds -- such as enabling + them to access private resources on your private network -- you can + request Cloud Build to run the workers in your own project by creating a + custom workers pool. + + + Attributes: + name: + User-defined name of the ``WorkerPool``. + project_id: + The project ID of the GCP project for which the ``WorkerPool`` + is created. + service_account_email: + Output only. The service account used to manage the + ``WorkerPool``. The service account must have the Compute + Instance Admin (Beta) permission at the project level. + worker_count: + Total number of workers to be created across all requested + regions. + worker_config: + Configuration to be used for a creating workers in the + ``WorkerPool``. + regions: + List of regions to create the ``WorkerPool``. Regions can't be + empty. If Cloud Build adds a new GCP region in the future, the + existing ``WorkerPool`` will not be enabled in the new region + automatically; you must add the new region to the ``regions`` + field to enable the ``WorkerPool`` in that region. + create_time: + Output only. Time at which the request to create the + ``WorkerPool`` was received. + update_time: + Output only. Time at which the request to update the + ``WorkerPool`` was received. + delete_time: + Output only. Time at which the request to delete the + ``WorkerPool`` was received. + status: + Output only. WorkerPool Status. 
+ """, + # @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v1.WorkerPool) + ), +) +_sym_db.RegisterMessage(WorkerPool) + +WorkerConfig = _reflection.GeneratedProtocolMessageType( + "WorkerConfig", + (_message.Message,), + dict( + DESCRIPTOR=_WORKERCONFIG, + __module__="google.devtools.cloudbuild_v1.proto.cloudbuild_pb2", + __doc__="""WorkerConfig defines the configuration to be used for a creating workers + in the pool. + + + Attributes: + machine_type: + Machine Type of the worker, such as n1-standard-1. See + https://cloud.google.com/compute/docs/machine-types. If left + blank, Cloud Build will use a standard unspecified machine to + create the worker pool. ``machine_type`` is overridden if you + specify a different machine type in ``build_options``. In this + case, the VM specified in the ``build_options`` will be + created on demand at build time. For more information see + https://cloud.google.com/cloud-build/docs/speeding-up- + builds#using\_custom\_virtual\_machine\_sizes + disk_size_gb: + Size of the disk attached to the worker, in GB. See + https://cloud.google.com/compute/docs/disks/ If ``0`` is + specified, Cloud Build will use a standard disk size. + ``disk_size`` is overridden if you specify a different disk + size in ``build_options``. In this case, a VM with a disk size + specified in the ``build_options`` will be created on demand + at build time. For more information see + https://cloud.google.com/cloud- + build/docs/api/reference/rest/v1/projects.builds#buildoptions + network: + The network definition used to create the worker. If this + section is left empty, the workers will be created in + WorkerPool.project\_id on the default network. + tag: + The tag applied to the worker, and the same tag used by the + firewall rule. It is used to identify the Cloud Build workers + among other VMs. The default value for tag is ``worker``. 
+ """, + # @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v1.WorkerConfig) + ), +) +_sym_db.RegisterMessage(WorkerConfig) + +Network = _reflection.GeneratedProtocolMessageType( + "Network", + (_message.Message,), + dict( + DESCRIPTOR=_NETWORK, + __module__="google.devtools.cloudbuild_v1.proto.cloudbuild_pb2", + __doc__="""Network describes the GCP network used to create workers in. + + + Attributes: + project_id: + Project id containing the defined network and subnetwork. For + a peered VPC, this will be the same as the project\_id in + which the workers are created. For a shared VPC, this will be + the project sharing the network with the project\_id project + in which workers will be created. For custom workers with no + VPC, this will be the same as project\_id. + network: + Network on which the workers are created. "default" network is + used if empty. + subnetwork: + Subnetwork on which the workers are created. "default" + subnetwork is used if empty. + """, + # @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v1.Network) + ), +) +_sym_db.RegisterMessage(Network) + +CreateWorkerPoolRequest = _reflection.GeneratedProtocolMessageType( + "CreateWorkerPoolRequest", + (_message.Message,), + dict( + DESCRIPTOR=_CREATEWORKERPOOLREQUEST, + __module__="google.devtools.cloudbuild_v1.proto.cloudbuild_pb2", + __doc__="""Request to create a new ``WorkerPool``. + + + Attributes: + parent: + ID of the parent project. + worker_pool: + ``WorkerPool`` resource to create. + """, + # @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v1.CreateWorkerPoolRequest) + ), +) +_sym_db.RegisterMessage(CreateWorkerPoolRequest) + +GetWorkerPoolRequest = _reflection.GeneratedProtocolMessageType( + "GetWorkerPoolRequest", + (_message.Message,), + dict( + DESCRIPTOR=_GETWORKERPOOLREQUEST, + __module__="google.devtools.cloudbuild_v1.proto.cloudbuild_pb2", + __doc__="""Request to get a ``WorkerPool`` with the specified name. 
+ + + Attributes: + name: + The field will contain name of the resource requested, for + example: "projects/project-1/workerPools/workerpool-name" + """, + # @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v1.GetWorkerPoolRequest) + ), +) +_sym_db.RegisterMessage(GetWorkerPoolRequest) + +DeleteWorkerPoolRequest = _reflection.GeneratedProtocolMessageType( + "DeleteWorkerPoolRequest", + (_message.Message,), + dict( + DESCRIPTOR=_DELETEWORKERPOOLREQUEST, + __module__="google.devtools.cloudbuild_v1.proto.cloudbuild_pb2", + __doc__="""Request to delete a ``WorkerPool``. + + + Attributes: + name: + The field will contain name of the resource requested, for + example: "projects/project-1/workerPools/workerpool-name" + """, + # @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v1.DeleteWorkerPoolRequest) + ), +) +_sym_db.RegisterMessage(DeleteWorkerPoolRequest) + +UpdateWorkerPoolRequest = _reflection.GeneratedProtocolMessageType( + "UpdateWorkerPoolRequest", + (_message.Message,), + dict( + DESCRIPTOR=_UPDATEWORKERPOOLREQUEST, + __module__="google.devtools.cloudbuild_v1.proto.cloudbuild_pb2", + __doc__="""Request to update a ``WorkerPool``. + + + Attributes: + name: + The field will contain name of the resource requested, for + example: "projects/project-1/workerPools/workerpool-name" + worker_pool: + ``WorkerPool`` resource to update. + """, + # @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v1.UpdateWorkerPoolRequest) + ), +) +_sym_db.RegisterMessage(UpdateWorkerPoolRequest) + +ListWorkerPoolsRequest = _reflection.GeneratedProtocolMessageType( + "ListWorkerPoolsRequest", + (_message.Message,), + dict( + DESCRIPTOR=_LISTWORKERPOOLSREQUEST, + __module__="google.devtools.cloudbuild_v1.proto.cloudbuild_pb2", + __doc__="""Request to list ``WorkerPool``\ s. + + + Attributes: + parent: + ID of the parent project. 
+ """, + # @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v1.ListWorkerPoolsRequest) + ), +) +_sym_db.RegisterMessage(ListWorkerPoolsRequest) + +ListWorkerPoolsResponse = _reflection.GeneratedProtocolMessageType( + "ListWorkerPoolsResponse", + (_message.Message,), + dict( + DESCRIPTOR=_LISTWORKERPOOLSRESPONSE, + __module__="google.devtools.cloudbuild_v1.proto.cloudbuild_pb2", + __doc__="""Response containing existing ``WorkerPools``. + + + Attributes: + worker_pools: + ``WorkerPools`` for the project. + """, + # @@protoc_insertion_point(class_scope:google.devtools.cloudbuild.v1.ListWorkerPoolsResponse) + ), +) +_sym_db.RegisterMessage(ListWorkerPoolsResponse) + + +DESCRIPTOR._options = None +_RETRYBUILDREQUEST.fields_by_name["project_id"]._options = None +_RETRYBUILDREQUEST.fields_by_name["id"]._options = None +_RUNBUILDTRIGGERREQUEST.fields_by_name["project_id"]._options = None +_RUNBUILDTRIGGERREQUEST.fields_by_name["trigger_id"]._options = None +_RUNBUILDTRIGGERREQUEST.fields_by_name["source"]._options = None +_BUILD_SUBSTITUTIONSENTRY._options = None +_BUILD_TIMINGENTRY._options = None +_SOURCEPROVENANCE_FILEHASHESENTRY._options = None +_SECRET_SECRETENVENTRY._options = None +_CREATEBUILDREQUEST.fields_by_name["project_id"]._options = None +_CREATEBUILDREQUEST.fields_by_name["build"]._options = None +_GETBUILDREQUEST.fields_by_name["project_id"]._options = None +_GETBUILDREQUEST.fields_by_name["id"]._options = None +_LISTBUILDSREQUEST.fields_by_name["project_id"]._options = None +_CANCELBUILDREQUEST.fields_by_name["project_id"]._options = None +_CANCELBUILDREQUEST.fields_by_name["id"]._options = None +_BUILDTRIGGER_SUBSTITUTIONSENTRY._options = None +_GITHUBEVENTSCONFIG.fields_by_name["installation_id"]._options = None +_CREATEBUILDTRIGGERREQUEST.fields_by_name["project_id"]._options = None +_CREATEBUILDTRIGGERREQUEST.fields_by_name["trigger"]._options = None +_GETBUILDTRIGGERREQUEST.fields_by_name["project_id"]._options = None 
+_GETBUILDTRIGGERREQUEST.fields_by_name["trigger_id"]._options = None +_LISTBUILDTRIGGERSREQUEST.fields_by_name["project_id"]._options = None +_DELETEBUILDTRIGGERREQUEST.fields_by_name["project_id"]._options = None +_DELETEBUILDTRIGGERREQUEST.fields_by_name["trigger_id"]._options = None +_UPDATEBUILDTRIGGERREQUEST.fields_by_name["project_id"]._options = None +_UPDATEBUILDTRIGGERREQUEST.fields_by_name["trigger_id"]._options = None +_UPDATEBUILDTRIGGERREQUEST.fields_by_name["trigger"]._options = None + +_CLOUDBUILD = _descriptor.ServiceDescriptor( + name="CloudBuild", + full_name="google.devtools.cloudbuild.v1.CloudBuild", + file=DESCRIPTOR, + index=0, + serialized_options=_b( + "\312A\031cloudbuild.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" + ), + serialized_start=8476, + serialized_end=11325, + methods=[ + _descriptor.MethodDescriptor( + name="CreateBuild", + full_name="google.devtools.cloudbuild.v1.CloudBuild.CreateBuild", + index=0, + containing_service=None, + input_type=_CREATEBUILDREQUEST, + output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, + serialized_options=_b( + '\202\323\344\223\002)" /v1/projects/{project_id}/builds:\005build\332A\020project_id,build\312A\037\n\005Build\022\026BuildOperationMetadata' + ), + ), + _descriptor.MethodDescriptor( + name="GetBuild", + full_name="google.devtools.cloudbuild.v1.CloudBuild.GetBuild", + index=1, + containing_service=None, + input_type=_GETBUILDREQUEST, + output_type=_BUILD, + serialized_options=_b( + "\202\323\344\223\002'\022%/v1/projects/{project_id}/builds/{id}\332A\rproject_id,id" + ), + ), + _descriptor.MethodDescriptor( + name="ListBuilds", + full_name="google.devtools.cloudbuild.v1.CloudBuild.ListBuilds", + index=2, + containing_service=None, + input_type=_LISTBUILDSREQUEST, + output_type=_LISTBUILDSRESPONSE, + serialized_options=_b( + '\202\323\344\223\002"\022 /v1/projects/{project_id}/builds\332A\021project_id,filter' + ), + ), + _descriptor.MethodDescriptor( + 
name="CancelBuild", + full_name="google.devtools.cloudbuild.v1.CloudBuild.CancelBuild", + index=3, + containing_service=None, + input_type=_CANCELBUILDREQUEST, + output_type=_BUILD, + serialized_options=_b( + '\202\323\344\223\0021",/v1/projects/{project_id}/builds/{id}:cancel:\001*\332A\rproject_id,id' + ), + ), + _descriptor.MethodDescriptor( + name="RetryBuild", + full_name="google.devtools.cloudbuild.v1.CloudBuild.RetryBuild", + index=4, + containing_service=None, + input_type=_RETRYBUILDREQUEST, + output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, + serialized_options=_b( + '\202\323\344\223\0020"+/v1/projects/{project_id}/builds/{id}:retry:\001*\332A\rproject_id,id\312A\037\n\005Build\022\026BuildOperationMetadata' + ), + ), + _descriptor.MethodDescriptor( + name="CreateBuildTrigger", + full_name="google.devtools.cloudbuild.v1.CloudBuild.CreateBuildTrigger", + index=5, + containing_service=None, + input_type=_CREATEBUILDTRIGGERREQUEST, + output_type=_BUILDTRIGGER, + serialized_options=_b( + '\202\323\344\223\002-""/v1/projects/{project_id}/triggers:\007trigger\332A\022project_id,trigger' + ), + ), + _descriptor.MethodDescriptor( + name="GetBuildTrigger", + full_name="google.devtools.cloudbuild.v1.CloudBuild.GetBuildTrigger", + index=6, + containing_service=None, + input_type=_GETBUILDTRIGGERREQUEST, + output_type=_BUILDTRIGGER, + serialized_options=_b( + "\202\323\344\223\0021\022//v1/projects/{project_id}/triggers/{trigger_id}\332A\025project_id,trigger_id" + ), + ), + _descriptor.MethodDescriptor( + name="ListBuildTriggers", + full_name="google.devtools.cloudbuild.v1.CloudBuild.ListBuildTriggers", + index=7, + containing_service=None, + input_type=_LISTBUILDTRIGGERSREQUEST, + output_type=_LISTBUILDTRIGGERSRESPONSE, + serialized_options=_b( + '\202\323\344\223\002$\022"/v1/projects/{project_id}/triggers\332A\nproject_id' + ), + ), + _descriptor.MethodDescriptor( + name="DeleteBuildTrigger", + 
full_name="google.devtools.cloudbuild.v1.CloudBuild.DeleteBuildTrigger", + index=8, + containing_service=None, + input_type=_DELETEBUILDTRIGGERREQUEST, + output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, + serialized_options=_b( + "\202\323\344\223\0021*//v1/projects/{project_id}/triggers/{trigger_id}\332A\025project_id,trigger_id" + ), + ), + _descriptor.MethodDescriptor( + name="UpdateBuildTrigger", + full_name="google.devtools.cloudbuild.v1.CloudBuild.UpdateBuildTrigger", + index=9, + containing_service=None, + input_type=_UPDATEBUILDTRIGGERREQUEST, + output_type=_BUILDTRIGGER, + serialized_options=_b( + "\202\323\344\223\002:2//v1/projects/{project_id}/triggers/{trigger_id}:\007trigger\332A\035project_id,trigger_id,trigger" + ), + ), + _descriptor.MethodDescriptor( + name="RunBuildTrigger", + full_name="google.devtools.cloudbuild.v1.CloudBuild.RunBuildTrigger", + index=10, + containing_service=None, + input_type=_RUNBUILDTRIGGERREQUEST, + output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, + serialized_options=_b( + '\202\323\344\223\002="3/v1/projects/{project_id}/triggers/{trigger_id}:run:\006source\332A\034project_id,trigger_id,source\312A\037\n\005Build\022\026BuildOperationMetadata' + ), + ), + _descriptor.MethodDescriptor( + name="CreateWorkerPool", + full_name="google.devtools.cloudbuild.v1.CloudBuild.CreateWorkerPool", + index=11, + containing_service=None, + input_type=_CREATEWORKERPOOLREQUEST, + output_type=_WORKERPOOL, + serialized_options=None, + ), + _descriptor.MethodDescriptor( + name="GetWorkerPool", + full_name="google.devtools.cloudbuild.v1.CloudBuild.GetWorkerPool", + index=12, + containing_service=None, + input_type=_GETWORKERPOOLREQUEST, + output_type=_WORKERPOOL, + serialized_options=None, + ), + _descriptor.MethodDescriptor( + name="DeleteWorkerPool", + full_name="google.devtools.cloudbuild.v1.CloudBuild.DeleteWorkerPool", + index=13, + containing_service=None, + input_type=_DELETEWORKERPOOLREQUEST, + 
output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, + serialized_options=None, + ), + _descriptor.MethodDescriptor( + name="UpdateWorkerPool", + full_name="google.devtools.cloudbuild.v1.CloudBuild.UpdateWorkerPool", + index=14, + containing_service=None, + input_type=_UPDATEWORKERPOOLREQUEST, + output_type=_WORKERPOOL, + serialized_options=None, + ), + _descriptor.MethodDescriptor( + name="ListWorkerPools", + full_name="google.devtools.cloudbuild.v1.CloudBuild.ListWorkerPools", + index=15, + containing_service=None, + input_type=_LISTWORKERPOOLSREQUEST, + output_type=_LISTWORKERPOOLSRESPONSE, + serialized_options=None, + ), + ], +) +_sym_db.RegisterServiceDescriptor(_CLOUDBUILD) + +DESCRIPTOR.services_by_name["CloudBuild"] = _CLOUDBUILD + +# @@protoc_insertion_point(module_scope) diff --git a/cloudbuild/google/cloud/devtools/cloudbuild_v1/proto/cloudbuild_pb2_grpc.py b/cloudbuild/google/cloud/devtools/cloudbuild_v1/proto/cloudbuild_pb2_grpc.py new file mode 100644 index 000000000000..c8abcba7c2ab --- /dev/null +++ b/cloudbuild/google/cloud/devtools/cloudbuild_v1/proto/cloudbuild_pb2_grpc.py @@ -0,0 +1,378 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +import grpc + +from google.cloud.devtools.cloudbuild_v1.proto import ( + cloudbuild_pb2 as google_dot_devtools_dot_cloudbuild__v1_dot_proto_dot_cloudbuild__pb2, +) +from google.longrunning import ( + operations_pb2 as google_dot_longrunning_dot_operations__pb2, +) +from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 + + +class CloudBuildStub(object): + """Creates and manages builds on Google Cloud Platform. + + The main concept used by this API is a `Build`, which describes the location + of the source to build, how to build the source, and where to store the + built artifacts, if any. + + A user can list previously-requested builds or get builds by their ID to + determine the status of the build. + """ + + def __init__(self, channel): + """Constructor. 
+ + Args: + channel: A grpc.Channel. + """ + self.CreateBuild = channel.unary_unary( + "/google.devtools.cloudbuild.v1.CloudBuild/CreateBuild", + request_serializer=google_dot_devtools_dot_cloudbuild__v1_dot_proto_dot_cloudbuild__pb2.CreateBuildRequest.SerializeToString, + response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, + ) + self.GetBuild = channel.unary_unary( + "/google.devtools.cloudbuild.v1.CloudBuild/GetBuild", + request_serializer=google_dot_devtools_dot_cloudbuild__v1_dot_proto_dot_cloudbuild__pb2.GetBuildRequest.SerializeToString, + response_deserializer=google_dot_devtools_dot_cloudbuild__v1_dot_proto_dot_cloudbuild__pb2.Build.FromString, + ) + self.ListBuilds = channel.unary_unary( + "/google.devtools.cloudbuild.v1.CloudBuild/ListBuilds", + request_serializer=google_dot_devtools_dot_cloudbuild__v1_dot_proto_dot_cloudbuild__pb2.ListBuildsRequest.SerializeToString, + response_deserializer=google_dot_devtools_dot_cloudbuild__v1_dot_proto_dot_cloudbuild__pb2.ListBuildsResponse.FromString, + ) + self.CancelBuild = channel.unary_unary( + "/google.devtools.cloudbuild.v1.CloudBuild/CancelBuild", + request_serializer=google_dot_devtools_dot_cloudbuild__v1_dot_proto_dot_cloudbuild__pb2.CancelBuildRequest.SerializeToString, + response_deserializer=google_dot_devtools_dot_cloudbuild__v1_dot_proto_dot_cloudbuild__pb2.Build.FromString, + ) + self.RetryBuild = channel.unary_unary( + "/google.devtools.cloudbuild.v1.CloudBuild/RetryBuild", + request_serializer=google_dot_devtools_dot_cloudbuild__v1_dot_proto_dot_cloudbuild__pb2.RetryBuildRequest.SerializeToString, + response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, + ) + self.CreateBuildTrigger = channel.unary_unary( + "/google.devtools.cloudbuild.v1.CloudBuild/CreateBuildTrigger", + request_serializer=google_dot_devtools_dot_cloudbuild__v1_dot_proto_dot_cloudbuild__pb2.CreateBuildTriggerRequest.SerializeToString, + 
response_deserializer=google_dot_devtools_dot_cloudbuild__v1_dot_proto_dot_cloudbuild__pb2.BuildTrigger.FromString, + ) + self.GetBuildTrigger = channel.unary_unary( + "/google.devtools.cloudbuild.v1.CloudBuild/GetBuildTrigger", + request_serializer=google_dot_devtools_dot_cloudbuild__v1_dot_proto_dot_cloudbuild__pb2.GetBuildTriggerRequest.SerializeToString, + response_deserializer=google_dot_devtools_dot_cloudbuild__v1_dot_proto_dot_cloudbuild__pb2.BuildTrigger.FromString, + ) + self.ListBuildTriggers = channel.unary_unary( + "/google.devtools.cloudbuild.v1.CloudBuild/ListBuildTriggers", + request_serializer=google_dot_devtools_dot_cloudbuild__v1_dot_proto_dot_cloudbuild__pb2.ListBuildTriggersRequest.SerializeToString, + response_deserializer=google_dot_devtools_dot_cloudbuild__v1_dot_proto_dot_cloudbuild__pb2.ListBuildTriggersResponse.FromString, + ) + self.DeleteBuildTrigger = channel.unary_unary( + "/google.devtools.cloudbuild.v1.CloudBuild/DeleteBuildTrigger", + request_serializer=google_dot_devtools_dot_cloudbuild__v1_dot_proto_dot_cloudbuild__pb2.DeleteBuildTriggerRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.UpdateBuildTrigger = channel.unary_unary( + "/google.devtools.cloudbuild.v1.CloudBuild/UpdateBuildTrigger", + request_serializer=google_dot_devtools_dot_cloudbuild__v1_dot_proto_dot_cloudbuild__pb2.UpdateBuildTriggerRequest.SerializeToString, + response_deserializer=google_dot_devtools_dot_cloudbuild__v1_dot_proto_dot_cloudbuild__pb2.BuildTrigger.FromString, + ) + self.RunBuildTrigger = channel.unary_unary( + "/google.devtools.cloudbuild.v1.CloudBuild/RunBuildTrigger", + request_serializer=google_dot_devtools_dot_cloudbuild__v1_dot_proto_dot_cloudbuild__pb2.RunBuildTriggerRequest.SerializeToString, + response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, + ) + self.CreateWorkerPool = channel.unary_unary( + 
"/google.devtools.cloudbuild.v1.CloudBuild/CreateWorkerPool", + request_serializer=google_dot_devtools_dot_cloudbuild__v1_dot_proto_dot_cloudbuild__pb2.CreateWorkerPoolRequest.SerializeToString, + response_deserializer=google_dot_devtools_dot_cloudbuild__v1_dot_proto_dot_cloudbuild__pb2.WorkerPool.FromString, + ) + self.GetWorkerPool = channel.unary_unary( + "/google.devtools.cloudbuild.v1.CloudBuild/GetWorkerPool", + request_serializer=google_dot_devtools_dot_cloudbuild__v1_dot_proto_dot_cloudbuild__pb2.GetWorkerPoolRequest.SerializeToString, + response_deserializer=google_dot_devtools_dot_cloudbuild__v1_dot_proto_dot_cloudbuild__pb2.WorkerPool.FromString, + ) + self.DeleteWorkerPool = channel.unary_unary( + "/google.devtools.cloudbuild.v1.CloudBuild/DeleteWorkerPool", + request_serializer=google_dot_devtools_dot_cloudbuild__v1_dot_proto_dot_cloudbuild__pb2.DeleteWorkerPoolRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.UpdateWorkerPool = channel.unary_unary( + "/google.devtools.cloudbuild.v1.CloudBuild/UpdateWorkerPool", + request_serializer=google_dot_devtools_dot_cloudbuild__v1_dot_proto_dot_cloudbuild__pb2.UpdateWorkerPoolRequest.SerializeToString, + response_deserializer=google_dot_devtools_dot_cloudbuild__v1_dot_proto_dot_cloudbuild__pb2.WorkerPool.FromString, + ) + self.ListWorkerPools = channel.unary_unary( + "/google.devtools.cloudbuild.v1.CloudBuild/ListWorkerPools", + request_serializer=google_dot_devtools_dot_cloudbuild__v1_dot_proto_dot_cloudbuild__pb2.ListWorkerPoolsRequest.SerializeToString, + response_deserializer=google_dot_devtools_dot_cloudbuild__v1_dot_proto_dot_cloudbuild__pb2.ListWorkerPoolsResponse.FromString, + ) + + +class CloudBuildServicer(object): + """Creates and manages builds on Google Cloud Platform. 
+ + The main concept used by this API is a `Build`, which describes the location + of the source to build, how to build the source, and where to store the + built artifacts, if any. + + A user can list previously-requested builds or get builds by their ID to + determine the status of the build. + """ + + def CreateBuild(self, request, context): + """Starts a build with the specified configuration. + + This method returns a long-running `Operation`, which includes the build + ID. Pass the build ID to `GetBuild` to determine the build status (such as + `SUCCESS` or `FAILURE`). + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def GetBuild(self, request, context): + """Returns information about a previously requested build. + + The `Build` that is returned includes its status (such as `SUCCESS`, + `FAILURE`, or `WORKING`), and timing information. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def ListBuilds(self, request, context): + """Lists previously requested builds. + + Previously requested builds may still be in-progress, or may have finished + successfully or unsuccessfully. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def CancelBuild(self, request, context): + """Cancels a build in progress. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def RetryBuild(self, request, context): + """Creates a new build based on the specified build. + + This method creates a new build using the original build request, which may + or may not result in an identical build. 
+ + For triggered builds: + + * Triggered builds resolve to a precise revision; therefore a retry of a + triggered build will result in a build that uses the same revision. + + For non-triggered builds that specify `RepoSource`: + + * If the original build built from the tip of a branch, the retried build + will build from the tip of that branch, which may not be the same revision + as the original build. + * If the original build specified a commit sha or revision ID, the retried + build will use the identical source. + + For builds that specify `StorageSource`: + + * If the original build pulled source from Google Cloud Storage without + specifying the generation of the object, the new build will use the current + object, which may be different from the original build source. + * If the original build pulled source from Cloud Storage and specified the + generation of the object, the new build will attempt to use the same + object, which may or may not be available depending on the bucket's + lifecycle management settings. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def CreateBuildTrigger(self, request, context): + """Creates a new `BuildTrigger`. + + This API is experimental. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def GetBuildTrigger(self, request, context): + """Returns information about a `BuildTrigger`. + + This API is experimental. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def ListBuildTriggers(self, request, context): + """Lists existing `BuildTrigger`s. + + This API is experimental. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def DeleteBuildTrigger(self, request, context): + """Deletes a `BuildTrigger` by its project ID and trigger ID. + + This API is experimental. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def UpdateBuildTrigger(self, request, context): + """Updates a `BuildTrigger` by its project ID and trigger ID. + + This API is experimental. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def RunBuildTrigger(self, request, context): + """Runs a `BuildTrigger` at a particular source revision. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def CreateWorkerPool(self, request, context): + """Creates a `WorkerPool` to run the builds, and returns the new worker pool. + + This API is experimental. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def GetWorkerPool(self, request, context): + """Returns information about a `WorkerPool`. + + This API is experimental. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def DeleteWorkerPool(self, request, context): + """Deletes a `WorkerPool` by its project ID and WorkerPool name. + + This API is experimental. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def UpdateWorkerPool(self, request, context): + """Update a `WorkerPool`. + + This API is experimental. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def ListWorkerPools(self, request, context): + """List project's `WorkerPool`s. + + This API is experimental. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + +def add_CloudBuildServicer_to_server(servicer, server): + rpc_method_handlers = { + "CreateBuild": grpc.unary_unary_rpc_method_handler( + servicer.CreateBuild, + request_deserializer=google_dot_devtools_dot_cloudbuild__v1_dot_proto_dot_cloudbuild__pb2.CreateBuildRequest.FromString, + response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ), + "GetBuild": grpc.unary_unary_rpc_method_handler( + servicer.GetBuild, + request_deserializer=google_dot_devtools_dot_cloudbuild__v1_dot_proto_dot_cloudbuild__pb2.GetBuildRequest.FromString, + response_serializer=google_dot_devtools_dot_cloudbuild__v1_dot_proto_dot_cloudbuild__pb2.Build.SerializeToString, + ), + "ListBuilds": grpc.unary_unary_rpc_method_handler( + servicer.ListBuilds, + request_deserializer=google_dot_devtools_dot_cloudbuild__v1_dot_proto_dot_cloudbuild__pb2.ListBuildsRequest.FromString, + response_serializer=google_dot_devtools_dot_cloudbuild__v1_dot_proto_dot_cloudbuild__pb2.ListBuildsResponse.SerializeToString, + ), + "CancelBuild": grpc.unary_unary_rpc_method_handler( + servicer.CancelBuild, + request_deserializer=google_dot_devtools_dot_cloudbuild__v1_dot_proto_dot_cloudbuild__pb2.CancelBuildRequest.FromString, + 
response_serializer=google_dot_devtools_dot_cloudbuild__v1_dot_proto_dot_cloudbuild__pb2.Build.SerializeToString, + ), + "RetryBuild": grpc.unary_unary_rpc_method_handler( + servicer.RetryBuild, + request_deserializer=google_dot_devtools_dot_cloudbuild__v1_dot_proto_dot_cloudbuild__pb2.RetryBuildRequest.FromString, + response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ), + "CreateBuildTrigger": grpc.unary_unary_rpc_method_handler( + servicer.CreateBuildTrigger, + request_deserializer=google_dot_devtools_dot_cloudbuild__v1_dot_proto_dot_cloudbuild__pb2.CreateBuildTriggerRequest.FromString, + response_serializer=google_dot_devtools_dot_cloudbuild__v1_dot_proto_dot_cloudbuild__pb2.BuildTrigger.SerializeToString, + ), + "GetBuildTrigger": grpc.unary_unary_rpc_method_handler( + servicer.GetBuildTrigger, + request_deserializer=google_dot_devtools_dot_cloudbuild__v1_dot_proto_dot_cloudbuild__pb2.GetBuildTriggerRequest.FromString, + response_serializer=google_dot_devtools_dot_cloudbuild__v1_dot_proto_dot_cloudbuild__pb2.BuildTrigger.SerializeToString, + ), + "ListBuildTriggers": grpc.unary_unary_rpc_method_handler( + servicer.ListBuildTriggers, + request_deserializer=google_dot_devtools_dot_cloudbuild__v1_dot_proto_dot_cloudbuild__pb2.ListBuildTriggersRequest.FromString, + response_serializer=google_dot_devtools_dot_cloudbuild__v1_dot_proto_dot_cloudbuild__pb2.ListBuildTriggersResponse.SerializeToString, + ), + "DeleteBuildTrigger": grpc.unary_unary_rpc_method_handler( + servicer.DeleteBuildTrigger, + request_deserializer=google_dot_devtools_dot_cloudbuild__v1_dot_proto_dot_cloudbuild__pb2.DeleteBuildTriggerRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + "UpdateBuildTrigger": grpc.unary_unary_rpc_method_handler( + servicer.UpdateBuildTrigger, + request_deserializer=google_dot_devtools_dot_cloudbuild__v1_dot_proto_dot_cloudbuild__pb2.UpdateBuildTriggerRequest.FromString, + 
response_serializer=google_dot_devtools_dot_cloudbuild__v1_dot_proto_dot_cloudbuild__pb2.BuildTrigger.SerializeToString, + ), + "RunBuildTrigger": grpc.unary_unary_rpc_method_handler( + servicer.RunBuildTrigger, + request_deserializer=google_dot_devtools_dot_cloudbuild__v1_dot_proto_dot_cloudbuild__pb2.RunBuildTriggerRequest.FromString, + response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ), + "CreateWorkerPool": grpc.unary_unary_rpc_method_handler( + servicer.CreateWorkerPool, + request_deserializer=google_dot_devtools_dot_cloudbuild__v1_dot_proto_dot_cloudbuild__pb2.CreateWorkerPoolRequest.FromString, + response_serializer=google_dot_devtools_dot_cloudbuild__v1_dot_proto_dot_cloudbuild__pb2.WorkerPool.SerializeToString, + ), + "GetWorkerPool": grpc.unary_unary_rpc_method_handler( + servicer.GetWorkerPool, + request_deserializer=google_dot_devtools_dot_cloudbuild__v1_dot_proto_dot_cloudbuild__pb2.GetWorkerPoolRequest.FromString, + response_serializer=google_dot_devtools_dot_cloudbuild__v1_dot_proto_dot_cloudbuild__pb2.WorkerPool.SerializeToString, + ), + "DeleteWorkerPool": grpc.unary_unary_rpc_method_handler( + servicer.DeleteWorkerPool, + request_deserializer=google_dot_devtools_dot_cloudbuild__v1_dot_proto_dot_cloudbuild__pb2.DeleteWorkerPoolRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + "UpdateWorkerPool": grpc.unary_unary_rpc_method_handler( + servicer.UpdateWorkerPool, + request_deserializer=google_dot_devtools_dot_cloudbuild__v1_dot_proto_dot_cloudbuild__pb2.UpdateWorkerPoolRequest.FromString, + response_serializer=google_dot_devtools_dot_cloudbuild__v1_dot_proto_dot_cloudbuild__pb2.WorkerPool.SerializeToString, + ), + "ListWorkerPools": grpc.unary_unary_rpc_method_handler( + servicer.ListWorkerPools, + request_deserializer=google_dot_devtools_dot_cloudbuild__v1_dot_proto_dot_cloudbuild__pb2.ListWorkerPoolsRequest.FromString, + 
response_serializer=google_dot_devtools_dot_cloudbuild__v1_dot_proto_dot_cloudbuild__pb2.ListWorkerPoolsResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + "google.devtools.cloudbuild.v1.CloudBuild", rpc_method_handlers + ) + server.add_generic_rpc_handlers((generic_handler,)) diff --git a/cloudbuild/google/cloud/devtools/cloudbuild_v1/types.py b/cloudbuild/google/cloud/devtools/cloudbuild_v1/types.py new file mode 100644 index 000000000000..7e81597fa21e --- /dev/null +++ b/cloudbuild/google/cloud/devtools/cloudbuild_v1/types.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +from __future__ import absolute_import +import sys + +from google.api_core.protobuf_helpers import get_messages + +from google.cloud.devtools.cloudbuild_v1.proto import cloudbuild_pb2 +from google.longrunning import operations_pb2 +from google.protobuf import any_pb2 +from google.protobuf import duration_pb2 +from google.protobuf import empty_pb2 +from google.protobuf import timestamp_pb2 +from google.rpc import status_pb2 + + +_shared_modules = [ + operations_pb2, + any_pb2, + duration_pb2, + empty_pb2, + timestamp_pb2, + status_pb2, +] + +_local_modules = [cloudbuild_pb2] + +names = [] + +for module in _shared_modules: # pragma: NO COVER + for name, message in get_messages(module).items(): + setattr(sys.modules[__name__], name, message) + names.append(name) +for module in _local_modules: + for name, message in get_messages(module).items(): + message.__module__ = "google.cloud.devtools.cloudbuild_v1.types" + setattr(sys.modules[__name__], name, message) + names.append(name) + + +__all__ = tuple(sorted(names)) diff --git a/cloudbuild/noxfile.py b/cloudbuild/noxfile.py new file mode 100644 index 000000000000..7c40b781775f --- /dev/null +++ b/cloudbuild/noxfile.py @@ -0,0 +1,160 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! 
+ +from __future__ import absolute_import +import os +import shutil + +import nox + + +LOCAL_DEPS = (os.path.join("..", "api_core"), os.path.join("..", "core")) +BLACK_VERSION = "black==19.3b0" +BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + +if os.path.exists("samples"): + BLACK_PATHS.append("samples") + + +@nox.session(python="3.7") +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION, *LOCAL_DEPS) + session.run("black", "--check", *BLACK_PATHS) + session.run("flake8", "google", "tests") + + +@nox.session(python="3.7") +def blacken(session): + """Run black. + + Format code to uniform standard. + + This currently uses Python 3.6 due to the automated Kokoro run of synthtool. + That run uses an image that doesn't have 3.6 installed. Before updating this + check the state of the `gcp_ubuntu_config` we use for that Kokoro run. + """ + session.install(BLACK_VERSION) + session.run("black", *BLACK_PATHS) + + +@nox.session(python="3.7") +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def default(session): + # Install all test dependencies, then install this package in-place. + session.install("mock", "pytest", "pytest-cov") + for local_dep in LOCAL_DEPS: + session.install("-e", local_dep) + session.install("-e", ".") + + # Run py.test against the unit tests. 
+ session.run( + "py.test", + "--quiet", + "--cov=google.cloud", + "--cov=tests.unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + ) + + +@nox.session(python=["2.7", "3.5", "3.6", "3.7"]) +def unit(session): + """Run the unit test suite.""" + default(session) + + +@nox.session(python=["2.7", "3.7"]) +def system(session): + """Run the system test suite.""" + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + # Sanity check: Only run tests if the environment variable is set. + if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""): + session.skip("Credentials must be set via environment variable") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + # Use pre-release gRPC for system tests. + session.install("--pre", "grpcio") + + # Install all test dependencies, then install this package into the + # virtualenv's dist-packages. + session.install("mock", "pytest") + for local_dep in LOCAL_DEPS: + session.install("-e", local_dep) + session.install("-e", "../test_utils/") + session.install("-e", ".") + + # Run py.test against the system tests. + if system_test_exists: + session.run("py.test", "--quiet", system_test_path, *session.posargs) + if system_test_folder_exists: + session.run("py.test", "--quiet", system_test_folder_path, *session.posargs) + + +@nox.session(python="3.7") +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. 
+ """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=65") + + session.run("coverage", "erase") + + +@nox.session(python="3.7") +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install("sphinx", "alabaster", "recommonmark") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) diff --git a/cloudbuild/setup.cfg b/cloudbuild/setup.cfg new file mode 100644 index 000000000000..3bd555500e37 --- /dev/null +++ b/cloudbuild/setup.cfg @@ -0,0 +1,3 @@ +# Generated by synthtool. DO NOT EDIT! +[bdist_wheel] +universal = 1 diff --git a/cloudbuild/setup.py b/cloudbuild/setup.py new file mode 100644 index 000000000000..c53b7b014b0b --- /dev/null +++ b/cloudbuild/setup.py @@ -0,0 +1,81 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import io +import os + +import setuptools + +name = "google-cloud-build" +description = "Google Cloud Build API client library" +version = "0.1.0" +# Should be one of: +# 'Development Status :: 3 - Alpha' +# 'Development Status :: 4 - Beta' +# 'Development Status :: 5 - Production/Stable' +release_status = "Development Status :: 3 - Alpha" +dependencies = [ + "google-api-core[grpc] >= 1.14.0, < 2.0.0dev", + 'enum34; python_version < "3.4"', +] + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package for package in setuptools.find_packages() if package.startswith("google") +] + +namespaces = ["google"] +if "google.cloud" in packages: + namespaces.append("google.cloud") + +if "google.cloud.devtools" in packages: + namespaces.append("google.cloud.devtools") + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url="https://github.com/googleapis/google-cloud-python", + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 2", + "Programming Language :: Python :: 2.7", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.5", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + namespace_packages=namespaces, + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/cloudbuild/synth.metadata b/cloudbuild/synth.metadata new file mode 100644 index 000000000000..e00caa5343ef 
--- /dev/null +++ b/cloudbuild/synth.metadata @@ -0,0 +1,39 @@ +{ + "updateTime": "2019-10-22T20:39:41.016824Z", + "sources": [ + { + "generator": { + "name": "artman", + "version": "0.40.2", + "dockerImage": "googleapis/artman@sha256:3b8f7d9b4c206843ce08053474f5c64ae4d388ff7d995e68b59fb65edf73eeb9" + } + }, + { + "git": { + "name": "googleapis", + "remote": "https://github.com/googleapis/googleapis.git", + "sha": "a9589347c884dd62c45b5852c1b86ff346bf98dc", + "internalRef": "276110444" + } + }, + { + "template": { + "name": "python_library", + "origin": "synthtool.gcp", + "version": "0.0.0+dev" + } + } + ], + "destinations": [ + { + "client": { + "source": "googleapis", + "apiName": "cloudbuild", + "apiVersion": "v1", + "language": "python", + "generator": "gapic", + "config": "google/devtools/cloudbuild/artman_cloudbuild.yaml" + } + } + ] +} \ No newline at end of file diff --git a/cloudbuild/synth.py b/cloudbuild/synth.py new file mode 100644 index 000000000000..d1760a0c46a3 --- /dev/null +++ b/cloudbuild/synth.py @@ -0,0 +1,87 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""This script is used to synthesize generated parts of this library.""" + +import synthtool as s +from synthtool import gcp + +gapic = gcp.GAPICGenerator() +common = gcp.CommonTemplates() +version = 'v1' + +# ---------------------------------------------------------------------------- +# Generate cloudbuild GAPIC layer +# ---------------------------------------------------------------------------- +library = gapic.py_library( + 'cloudbuild', + version, + config_path='/google/devtools/cloudbuild/artman_cloudbuild.yaml', + generator_args=["--dev_samples"], + include_protos=True +) + +s.move( + library, + excludes=[ + 'docs/index.rst', + 'nox*.py', + 'setup.py', + 'setup.cfg', + '**/*.proto' + 'google/cloud/devtools/__init__.py' # declare this as a namespace package + ], +) + +# Move protos to the right directory +s.move(library / "google/cloud/cloudbuild_v1/proto/*.proto", "google/cloud/devtools/cloudbuild_v1/proto") + +# Fix up imports +s.replace( + ["google/**/*.py", "tests/unit/**/*.py"], + r"from google.devtools.cloudbuild_v1.proto import ", + r"from google.cloud.devtools.cloudbuild_v1.proto import ", +) + + +# Rename package to `google-cloud-build` +s.replace( + ["**/*.rst", "**/*.py", "**/*.md"], + "google-cloud-build", + "google-cloud-build" +) + +# Fix reference to product documentation +s.replace( + 'README.rst', + 'https://cloud\.google\.com/cloudbuild', + 'https://cloud.google.com/cloud-build' +) + +# Fix link to auth +s.replace( + 'README.rst', + 'https://googleapis\.github\.io/google-cloud-python/latest/core/auth\.html', + 'https://googleapis.dev/python/google-api-core/latest/auth.html' +) + +# ---------------------------------------------------------------------------- +# Add templated files +# ---------------------------------------------------------------------------- + +# coverage level is low because of missing coverage for __init__.py files +templated_files = common.py_library(unit_cov_level=65, cov_level=65) +s.move(templated_files) + 
+s.shell.run(["nox", "-s", "blacken"], hide_output=False) diff --git a/cloudbuild/tests/unit/gapic/v1/test_cloud_build_client_v1.py b/cloudbuild/tests/unit/gapic/v1/test_cloud_build_client_v1.py new file mode 100644 index 000000000000..d9757069cc90 --- /dev/null +++ b/cloudbuild/tests/unit/gapic/v1/test_cloud_build_client_v1.py @@ -0,0 +1,754 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Unit tests.""" + +import mock +import pytest + +from google.cloud.devtools import cloudbuild_v1 +from google.cloud.devtools.cloudbuild_v1.proto import cloudbuild_pb2 +from google.longrunning import operations_pb2 +from google.protobuf import empty_pb2 + + +class MultiCallableStub(object): + """Stub for the grpc.UnaryUnaryMultiCallable interface.""" + + def __init__(self, method, channel_stub): + self.method = method + self.channel_stub = channel_stub + + def __call__(self, request, timeout=None, metadata=None, credentials=None): + self.channel_stub.requests.append((self.method, request)) + + response = None + if self.channel_stub.responses: + response = self.channel_stub.responses.pop() + + if isinstance(response, Exception): + raise response + + if response: + return response + + +class ChannelStub(object): + """Stub for the grpc.Channel interface.""" + + def __init__(self, responses=[]): + self.responses = responses + self.requests = [] + + def unary_unary(self, method, request_serializer=None, 
response_deserializer=None): + return MultiCallableStub(method, self) + + +class CustomException(Exception): + pass + + +class TestCloudBuildClient(object): + def test_create_build(self): + # Setup Expected Response + name = "name3373707" + done = True + expected_response = {"name": name, "done": done} + expected_response = operations_pb2.Operation(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = cloudbuild_v1.CloudBuildClient() + + # Setup Request + project_id = "projectId-1969970175" + build = {} + + response = client.create_build(project_id, build) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = cloudbuild_pb2.CreateBuildRequest( + project_id=project_id, build=build + ) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_create_build_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = cloudbuild_v1.CloudBuildClient() + + # Setup request + project_id = "projectId-1969970175" + build = {} + + with pytest.raises(CustomException): + client.create_build(project_id, build) + + def test_get_build(self): + # Setup Expected Response + id_2 = "id23227150" + project_id_2 = "projectId2939242356" + status_detail = "statusDetail2089931070" + logs_bucket = "logsBucket1565363834" + build_trigger_id = "buildTriggerId1105559411" + log_url = "logUrl342054388" + expected_response = { + "id": id_2, + "project_id": project_id_2, + "status_detail": status_detail, + "logs_bucket": logs_bucket, + "build_trigger_id": build_trigger_id, + "log_url": log_url, + } + expected_response = 
cloudbuild_pb2.Build(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = cloudbuild_v1.CloudBuildClient() + + # Setup Request + project_id = "projectId-1969970175" + id_ = "id3355" + + response = client.get_build(project_id, id_) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = cloudbuild_pb2.GetBuildRequest(project_id=project_id, id=id_) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_get_build_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = cloudbuild_v1.CloudBuildClient() + + # Setup request + project_id = "projectId-1969970175" + id_ = "id3355" + + with pytest.raises(CustomException): + client.get_build(project_id, id_) + + def test_list_builds(self): + # Setup Expected Response + next_page_token = "" + builds_element = {} + builds = [builds_element] + expected_response = {"next_page_token": next_page_token, "builds": builds} + expected_response = cloudbuild_pb2.ListBuildsResponse(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = cloudbuild_v1.CloudBuildClient() + + # Setup Request + project_id = "projectId-1969970175" + + paged_list_response = client.list_builds(project_id) + resources = list(paged_list_response) + assert len(resources) == 1 + + assert expected_response.builds[0] == resources[0] + + assert len(channel.requests) == 1 + expected_request = 
cloudbuild_pb2.ListBuildsRequest(project_id=project_id) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_list_builds_exception(self): + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = cloudbuild_v1.CloudBuildClient() + + # Setup request + project_id = "projectId-1969970175" + + paged_list_response = client.list_builds(project_id) + with pytest.raises(CustomException): + list(paged_list_response) + + def test_cancel_build(self): + # Setup Expected Response + id_2 = "id23227150" + project_id_2 = "projectId2939242356" + status_detail = "statusDetail2089931070" + logs_bucket = "logsBucket1565363834" + build_trigger_id = "buildTriggerId1105559411" + log_url = "logUrl342054388" + expected_response = { + "id": id_2, + "project_id": project_id_2, + "status_detail": status_detail, + "logs_bucket": logs_bucket, + "build_trigger_id": build_trigger_id, + "log_url": log_url, + } + expected_response = cloudbuild_pb2.Build(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = cloudbuild_v1.CloudBuildClient() + + # Setup Request + project_id = "projectId-1969970175" + id_ = "id3355" + + response = client.cancel_build(project_id, id_) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = cloudbuild_pb2.CancelBuildRequest( + project_id=project_id, id=id_ + ) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_cancel_build_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with 
patch as create_channel: + create_channel.return_value = channel + client = cloudbuild_v1.CloudBuildClient() + + # Setup request + project_id = "projectId-1969970175" + id_ = "id3355" + + with pytest.raises(CustomException): + client.cancel_build(project_id, id_) + + def test_create_build_trigger(self): + # Setup Expected Response + id_ = "id3355" + description = "description-1724546052" + name = "name3373707" + filename = "filename-734768633" + disabled = True + expected_response = { + "id": id_, + "description": description, + "name": name, + "filename": filename, + "disabled": disabled, + } + expected_response = cloudbuild_pb2.BuildTrigger(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = cloudbuild_v1.CloudBuildClient() + + # Setup Request + project_id = "projectId-1969970175" + trigger = {} + + response = client.create_build_trigger(project_id, trigger) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = cloudbuild_pb2.CreateBuildTriggerRequest( + project_id=project_id, trigger=trigger + ) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_create_build_trigger_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = cloudbuild_v1.CloudBuildClient() + + # Setup request + project_id = "projectId-1969970175" + trigger = {} + + with pytest.raises(CustomException): + client.create_build_trigger(project_id, trigger) + + def test_get_build_trigger(self): + # Setup Expected Response + id_ = "id3355" + description = "description-1724546052" + name = "name3373707" + filename = 
"filename-734768633" + disabled = True + expected_response = { + "id": id_, + "description": description, + "name": name, + "filename": filename, + "disabled": disabled, + } + expected_response = cloudbuild_pb2.BuildTrigger(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = cloudbuild_v1.CloudBuildClient() + + # Setup Request + project_id = "projectId-1969970175" + trigger_id = "triggerId1363517698" + + response = client.get_build_trigger(project_id, trigger_id) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = cloudbuild_pb2.GetBuildTriggerRequest( + project_id=project_id, trigger_id=trigger_id + ) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_get_build_trigger_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = cloudbuild_v1.CloudBuildClient() + + # Setup request + project_id = "projectId-1969970175" + trigger_id = "triggerId1363517698" + + with pytest.raises(CustomException): + client.get_build_trigger(project_id, trigger_id) + + def test_list_build_triggers(self): + # Setup Expected Response + next_page_token = "nextPageToken-1530815211" + expected_response = {"next_page_token": next_page_token} + expected_response = cloudbuild_pb2.ListBuildTriggersResponse( + **expected_response + ) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = cloudbuild_v1.CloudBuildClient() + + # Setup Request + 
project_id = "projectId-1969970175" + + response = client.list_build_triggers(project_id) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = cloudbuild_pb2.ListBuildTriggersRequest( + project_id=project_id + ) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_list_build_triggers_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = cloudbuild_v1.CloudBuildClient() + + # Setup request + project_id = "projectId-1969970175" + + with pytest.raises(CustomException): + client.list_build_triggers(project_id) + + def test_delete_build_trigger(self): + channel = ChannelStub() + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = cloudbuild_v1.CloudBuildClient() + + # Setup Request + project_id = "projectId-1969970175" + trigger_id = "triggerId1363517698" + + client.delete_build_trigger(project_id, trigger_id) + + assert len(channel.requests) == 1 + expected_request = cloudbuild_pb2.DeleteBuildTriggerRequest( + project_id=project_id, trigger_id=trigger_id + ) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_delete_build_trigger_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = cloudbuild_v1.CloudBuildClient() + + # Setup request + project_id = "projectId-1969970175" + trigger_id = "triggerId1363517698" + + with pytest.raises(CustomException): + client.delete_build_trigger(project_id, trigger_id) + + def test_update_build_trigger(self): + # Setup 
Expected Response + id_ = "id3355" + description = "description-1724546052" + name = "name3373707" + filename = "filename-734768633" + disabled = True + expected_response = { + "id": id_, + "description": description, + "name": name, + "filename": filename, + "disabled": disabled, + } + expected_response = cloudbuild_pb2.BuildTrigger(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = cloudbuild_v1.CloudBuildClient() + + # Setup Request + project_id = "projectId-1969970175" + trigger_id = "triggerId1363517698" + trigger = {} + + response = client.update_build_trigger(project_id, trigger_id, trigger) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = cloudbuild_pb2.UpdateBuildTriggerRequest( + project_id=project_id, trigger_id=trigger_id, trigger=trigger + ) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_update_build_trigger_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = cloudbuild_v1.CloudBuildClient() + + # Setup request + project_id = "projectId-1969970175" + trigger_id = "triggerId1363517698" + trigger = {} + + with pytest.raises(CustomException): + client.update_build_trigger(project_id, trigger_id, trigger) + + def test_run_build_trigger(self): + # Setup Expected Response + name = "name3373707" + done = True + expected_response = {"name": name, "done": done} + expected_response = operations_pb2.Operation(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = 
mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = cloudbuild_v1.CloudBuildClient() + + # Setup Request + project_id = "projectId-1969970175" + trigger_id = "triggerId1363517698" + source = {} + + response = client.run_build_trigger(project_id, trigger_id, source) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = cloudbuild_pb2.RunBuildTriggerRequest( + project_id=project_id, trigger_id=trigger_id, source=source + ) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_run_build_trigger_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = cloudbuild_v1.CloudBuildClient() + + # Setup request + project_id = "projectId-1969970175" + trigger_id = "triggerId1363517698" + source = {} + + with pytest.raises(CustomException): + client.run_build_trigger(project_id, trigger_id, source) + + def test_retry_build(self): + # Setup Expected Response + name = "name3373707" + done = True + expected_response = {"name": name, "done": done} + expected_response = operations_pb2.Operation(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = cloudbuild_v1.CloudBuildClient() + + # Setup Request + project_id = "projectId-1969970175" + id_ = "id3355" + + response = client.retry_build(project_id, id_) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = cloudbuild_pb2.RetryBuildRequest( + project_id=project_id, id=id_ + ) + actual_request = channel.requests[0][1] + assert 
expected_request == actual_request + + def test_retry_build_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = cloudbuild_v1.CloudBuildClient() + + # Setup request + project_id = "projectId-1969970175" + id_ = "id3355" + + with pytest.raises(CustomException): + client.retry_build(project_id, id_) + + def test_create_worker_pool(self): + # Setup Expected Response + name = "name3373707" + project_id = "projectId-1969970175" + service_account_email = "serviceAccountEmail-1300473088" + worker_count = 372044046 + expected_response = { + "name": name, + "project_id": project_id, + "service_account_email": service_account_email, + "worker_count": worker_count, + } + expected_response = cloudbuild_pb2.WorkerPool(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = cloudbuild_v1.CloudBuildClient() + + response = client.create_worker_pool() + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = cloudbuild_pb2.CreateWorkerPoolRequest() + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_create_worker_pool_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = cloudbuild_v1.CloudBuildClient() + + with pytest.raises(CustomException): + client.create_worker_pool() + + def test_get_worker_pool(self): + # Setup Expected Response + name = "name3373707" + project_id = "projectId-1969970175" + 
service_account_email = "serviceAccountEmail-1300473088" + worker_count = 372044046 + expected_response = { + "name": name, + "project_id": project_id, + "service_account_email": service_account_email, + "worker_count": worker_count, + } + expected_response = cloudbuild_pb2.WorkerPool(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = cloudbuild_v1.CloudBuildClient() + + response = client.get_worker_pool() + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = cloudbuild_pb2.GetWorkerPoolRequest() + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_get_worker_pool_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = cloudbuild_v1.CloudBuildClient() + + with pytest.raises(CustomException): + client.get_worker_pool() + + def test_delete_worker_pool(self): + channel = ChannelStub() + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = cloudbuild_v1.CloudBuildClient() + + client.delete_worker_pool() + + assert len(channel.requests) == 1 + expected_request = cloudbuild_pb2.DeleteWorkerPoolRequest() + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_delete_worker_pool_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = cloudbuild_v1.CloudBuildClient() + + 
with pytest.raises(CustomException): + client.delete_worker_pool() + + def test_update_worker_pool(self): + # Setup Expected Response + name = "name3373707" + project_id = "projectId-1969970175" + service_account_email = "serviceAccountEmail-1300473088" + worker_count = 372044046 + expected_response = { + "name": name, + "project_id": project_id, + "service_account_email": service_account_email, + "worker_count": worker_count, + } + expected_response = cloudbuild_pb2.WorkerPool(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = cloudbuild_v1.CloudBuildClient() + + response = client.update_worker_pool() + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = cloudbuild_pb2.UpdateWorkerPoolRequest() + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_update_worker_pool_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = cloudbuild_v1.CloudBuildClient() + + with pytest.raises(CustomException): + client.update_worker_pool() + + def test_list_worker_pools(self): + # Setup Expected Response + expected_response = {} + expected_response = cloudbuild_pb2.ListWorkerPoolsResponse(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = cloudbuild_v1.CloudBuildClient() + + response = client.list_worker_pools() + assert expected_response == response + + assert len(channel.requests) == 1 + 
expected_request = cloudbuild_pb2.ListWorkerPoolsRequest() + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_list_worker_pools_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = cloudbuild_v1.CloudBuildClient() + + with pytest.raises(CustomException): + client.list_worker_pools() diff --git a/container/docs/conf.py b/container/docs/conf.py index b303b529e88a..56dd626abca6 100644 --- a/container/docs/conf.py +++ b/container/docs/conf.py @@ -338,7 +338,7 @@ "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://2.python-requests.org/en/master/", None), + "requests": ("https://requests.kennethreitz.org/en/stable/", None), "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } diff --git a/containeranalysis/CHANGELOG.md b/containeranalysis/CHANGELOG.md index 70798d534fa9..2ea3a2c00eaf 100644 --- a/containeranalysis/CHANGELOG.md +++ b/containeranalysis/CHANGELOG.md @@ -1,5 +1,19 @@ # Changelog +## 0.3.1 + +11-07-2019 11:08 PST + +**NOTE**: Please upgrade to this version if you will also be using `google-cloud-build`. + +### Implementation Changes +- Make google.cloud.devtools a namespace ([#9606](https://github.com/googleapis/google-cloud-python/pull/9606)) + +### Documentation +- Change requests intersphinx ref (via synth) +- Fix intersphinx reference to requests ([#9294](https://github.com/googleapis/google-cloud-python/pull/9294)) +- Remove CI for gh-pages, use googleapis.dev for `api_core` refs. 
([#9085](https://github.com/googleapis/google-cloud-python/pull/9085)) + ## 0.3.0 08-12-2019 13:53 PDT diff --git a/containeranalysis/docs/conf.py b/containeranalysis/docs/conf.py index 4e3dd1dac2f6..142b347b9169 100644 --- a/containeranalysis/docs/conf.py +++ b/containeranalysis/docs/conf.py @@ -344,7 +344,7 @@ "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://2.python-requests.org/en/master/", None), + "requests": ("https://requests.kennethreitz.org/en/master/", None), "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } diff --git a/containeranalysis/google/cloud/devtools/__init__.py b/containeranalysis/google/cloud/devtools/__init__.py index e69de29bb2d1..8fcc60e2b9c6 100644 --- a/containeranalysis/google/cloud/devtools/__init__.py +++ b/containeranalysis/google/cloud/devtools/__init__.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +try: + import pkg_resources + + pkg_resources.declare_namespace(__name__) +except ImportError: + import pkgutil + + __path__ = pkgutil.extend_path(__path__, __name__) diff --git a/containeranalysis/noxfile.py b/containeranalysis/noxfile.py index 7b81e2c3bcdc..019ad875f8b7 100644 --- a/containeranalysis/noxfile.py +++ b/containeranalysis/noxfile.py @@ -133,7 +133,7 @@ def cover(session): test runs (not system test runs), and then erases coverage data. """ session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=51") + session.run("coverage", "report", "--show-missing", "--fail-under=45") session.run("coverage", "erase") diff --git a/containeranalysis/setup.py b/containeranalysis/setup.py index d92c135af22c..47054094b76c 100644 --- a/containeranalysis/setup.py +++ b/containeranalysis/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-containeranalysis" description = "Container Analysis API API client library" -version = "0.3.0" +version = "0.3.1" release_status = "Development Status :: 3 - Alpha" dependencies = [ "google-api-core[grpc] >= 1.14.0, < 2.0.0dev", @@ -45,7 +45,8 @@ namespaces = ["google"] if "google.cloud" in packages: namespaces.append("google.cloud") - +if "google.cloud.devtools" in packages: + namespaces.append("google.cloud.devtools") setuptools.setup( name=name, diff --git a/containeranalysis/synth.metadata b/containeranalysis/synth.metadata index 1edc47ed9c3d..e5758bd8df5e 100644 --- a/containeranalysis/synth.metadata +++ b/containeranalysis/synth.metadata @@ -1,26 +1,26 @@ { - "updateTime": "2019-08-06T12:18:03.197749Z", + "updateTime": "2019-11-05T21:33:52.199324Z", "sources": [ { "generator": { "name": "artman", - "version": "0.32.1", - "dockerImage": "googleapis/artman@sha256:a684d40ba9a4e15946f5f2ca6b4bd9fe301192f522e9de4fff622118775f309b" + "version": "0.41.0", + "dockerImage": "googleapis/artman@sha256:75b38a3b073a7b243545f2332463096624c802bb1e56b8cb6f22ba1ecd325fa9" } }, { "git": { "name": 
"googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "e699b0cba64ffddfae39633417180f1f65875896", - "internalRef": "261759677" + "sha": "cb542d6f5f1c9431ec4181d9cfd7f8d8c953e60b", + "internalRef": "278688708" } }, { "template": { "name": "python_library", "origin": "synthtool.gcp", - "version": "2019.5.2" + "version": "2019.10.17" } } ], diff --git a/containeranalysis/synth.py b/containeranalysis/synth.py index 35c1978464f2..9d1c6d3d6945 100644 --- a/containeranalysis/synth.py +++ b/containeranalysis/synth.py @@ -33,30 +33,42 @@ include_protos=True, ) -excludes = ["nox.py", "setup.py", "google/cloud/containeranalysis_v1/proto", "README.rst", "docs/index.rst"] +excludes = [ + "nox.py", + "setup.py", + "google/cloud/containeranalysis_v1/proto", + "google/cloud/devtools/__init__.py", # other packages also use this namespace + "README.rst", + "docs/index.rst", +] s.move(library, excludes=excludes) # .proto files end up in the wrong place by default -s.move(library / "google/cloud/containeranalysis_v1/proto", "google/cloud/devtools/containeranalysis_v1/proto") - +s.move( + library / "google/cloud/containeranalysis_v1/proto", + "google/cloud/devtools/containeranalysis_v1/proto", +) # Insert helper method to get grafeas client -s.replace("google/**/container_analysis_client.py", -r"""_GAPIC_LIBRARY_VERSION = pkg_resources\.get_distribution\( +s.replace( + "google/**/container_analysis_client.py", + r"""_GAPIC_LIBRARY_VERSION = pkg_resources\.get_distribution\( 'google-cloud-containeranalysis', \)\.version""", -r"""from grafeas import grafeas_v1 + r"""from grafeas import grafeas_v1 from grafeas.grafeas_v1.gapic.transports import grafeas_grpc_transport _GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( "google-cloud-containeranalysis" ).version -""") +""", +) -s.replace("google/**/container_analysis_client.py", -r''' \# Service calls - def set_iam_policy\(''', -r''' def get_grafeas_client(self): +s.replace( + 
"google/**/container_analysis_client.py", + r""" \# Service calls + def set_iam_policy\(""", + r''' def get_grafeas_client(self): """Returns an equivalent grafeas client. Returns: @@ -69,11 +81,12 @@ def set_iam_policy\(''', return grafeas_v1.GrafeasClient(grafeas_transport) # Service calls - def set_iam_policy(''') + def set_iam_policy(''', +) # ---------------------------------------------------------------------------- # Add templated files # ---------------------------------------------------------------------------- -templated_files = common.py_library(unit_cov_level=51, cov_level=51) +templated_files = common.py_library(unit_cov_level=45, cov_level=45) s.move(templated_files) s.shell.run(["nox", "-s", "blacken"], hide_output=False) diff --git a/core/docs/conf.py b/core/docs/conf.py index 893752870198..ca4685c59ea9 100644 --- a/core/docs/conf.py +++ b/core/docs/conf.py @@ -338,7 +338,7 @@ "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://2.python-requests.org/en/master/", None), + "requests": ("https://requests.kennethreitz.org/en/stable/", None), "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } diff --git a/core/google/cloud/client.py b/core/google/cloud/client.py index 2623d030f00e..f204ba8e91c1 100644 --- a/core/google/cloud/client.py +++ b/core/google/cloud/client.py @@ -29,7 +29,7 @@ _GOOGLE_AUTH_CREDENTIALS_HELP = ( "This library only supports credentials from google-auth-library-python. " - "See https://google-cloud-python.readthedocs.io/en/latest/core/auth.html " + "See https://google-auth.readthedocs.io/en/latest/ " "for help on authentication with this library." 
) diff --git a/datacatalog/CHANGELOG.md b/datacatalog/CHANGELOG.md index f8cce1a5f3f5..7ae6b349db50 100644 --- a/datacatalog/CHANGELOG.md +++ b/datacatalog/CHANGELOG.md @@ -4,6 +4,27 @@ [1]: https://pypi.org/project/google-cloud-datacatalog/#history +## 0.4.0 + +10-23-2019 08:54 PDT + +### Implementation Changes + +- remove send/recv msg size limit (via synth) ([#8949](https://github.com/googleapis/google-cloud-python/pull/8949)) + +### New Features + +- add entry group operations ([#9520](https://github.com/googleapis/google-cloud-python/pull/9520)) + +### Documentation + +- fix intersphinx reference to requests ([#9294](https://github.com/googleapis/google-cloud-python/pull/9294)) +- remove CI for gh-pages, use googleapis.dev for api_core refs. ([#9085](https://github.com/googleapis/google-cloud-python/pull/9085)) +- remove unused import from samples (via synth). ([#9110](https://github.com/googleapis/google-cloud-python/pull/9110)) +- remove compatability badges from READMEs. ([#9035](https://github.com/googleapis/google-cloud-python/pull/9035)) +- update intersphinx mapping for requests. ([#8805](https://github.com/googleapis/google-cloud-python/pull/8805)) +- add 'search' sample (via synth). 
([#8793](https://github.com/googleapis/google-cloud-python/pull/8793)) + ## 0.3.0 07-24-2019 15:58 PDT diff --git a/datacatalog/docs/conf.py b/datacatalog/docs/conf.py index 194ca0394eeb..bc2c56e8f164 100644 --- a/datacatalog/docs/conf.py +++ b/datacatalog/docs/conf.py @@ -338,7 +338,7 @@ "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://2.python-requests.org/en/master/", None), + "requests": ("https://requests.kennethreitz.org/en/stable/", None), "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } diff --git a/datacatalog/google/cloud/datacatalog_v1beta1/gapic/data_catalog_client.py b/datacatalog/google/cloud/datacatalog_v1beta1/gapic/data_catalog_client.py index 3be193c2d2f0..cea50cd34f7f 100644 --- a/datacatalog/google/cloud/datacatalog_v1beta1/gapic/data_catalog_client.py +++ b/datacatalog/google/cloud/datacatalog_v1beta1/gapic/data_catalog_client.py @@ -96,6 +96,16 @@ def entry_path(cls, project, location, entry_group, entry): entry=entry, ) + @classmethod + def entry_group_path(cls, project, location, entry_group): + """Return a fully-qualified entry_group string.""" + return google.api_core.path_template.expand( + "projects/{project}/locations/{location}/entryGroups/{entry_group}", + project=project, + location=location, + entry_group=entry_group, + ) + @classmethod def field_path(cls, project, location, tag_template, field): """Return a fully-qualified field string.""" @@ -268,7 +278,7 @@ def search_catalog( This is a custom method (https://cloud.google.com/apis/design/custom\_methods) and does not return the complete resource, only the resource identifier and high - level fields. Clients can subsequentally call Get methods. + level fields. Clients can subsequentally call ``Get`` methods. 
Note that searches do not have full recall. There may be results that match your query but are not returned, even in subsequent pages of @@ -330,15 +340,13 @@ def search_catalog( order_by (str): Specifies the ordering of results, currently supported case-sensitive choices are: - .. raw:: html + - ``relevance``, only supports desecending + - ``last_access_timestamp [asc|desc]``, defaults to descending if not + specified + - ``last_modified_timestamp [asc|desc]``, defaults to descending if not + specified -
    -
  • relevance
  • -
  • last_access_timestamp [asc|desc], defaults to descending if not - specified,
  • -
  • last_modified_timestamp [asc|desc], defaults to descending if not - specified.
  • -
+ If not specified, defaults to ``relevance`` descending. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. @@ -390,6 +398,338 @@ def search_catalog( ) return iterator + def create_entry_group( + self, + parent, + entry_group_id, + entry_group, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Alpha feature. Creates an EntryGroup. The user should enable the Data + Catalog API in the project identified by the ``parent`` parameter (see + [Data Catalog Resource Project] + (/data-catalog/docs/concepts/resource-project) for more information). + + Example: + >>> from google.cloud import datacatalog_v1beta1 + >>> + >>> client = datacatalog_v1beta1.DataCatalogClient() + >>> + >>> parent = client.location_path('[PROJECT]', '[LOCATION]') + >>> + >>> # TODO: Initialize `entry_group_id`: + >>> entry_group_id = '' + >>> + >>> # TODO: Initialize `entry_group`: + >>> entry_group = {} + >>> + >>> response = client.create_entry_group(parent, entry_group_id, entry_group) + + Args: + parent (str): Required. The name of the project this entry group is in. Example: + + - projects/{project\_id}/locations/{location} + + Note that this EntryGroup and its child resources may not actually be + stored in the location in this name. + entry_group_id (str): Required. The id of the entry group to create. + entry_group (Union[dict, ~google.cloud.datacatalog_v1beta1.types.EntryGroup]): The entry group to create. Defaults to an empty entry group. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.datacatalog_v1beta1.types.EntryGroup` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. 
+ timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.datacatalog_v1beta1.types.EntryGroup` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "create_entry_group" not in self._inner_api_calls: + self._inner_api_calls[ + "create_entry_group" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.create_entry_group, + default_retry=self._method_configs["CreateEntryGroup"].retry, + default_timeout=self._method_configs["CreateEntryGroup"].timeout, + client_info=self._client_info, + ) + + request = datacatalog_pb2.CreateEntryGroupRequest( + parent=parent, entry_group_id=entry_group_id, entry_group=entry_group + ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("parent", parent)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + return self._inner_api_calls["create_entry_group"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def get_entry_group( + self, + name, + read_mask=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Alpha feature. + Gets an EntryGroup. 
+ + Example: + >>> from google.cloud import datacatalog_v1beta1 + >>> + >>> client = datacatalog_v1beta1.DataCatalogClient() + >>> + >>> name = client.entry_group_path('[PROJECT]', '[LOCATION]', '[ENTRY_GROUP]') + >>> + >>> response = client.get_entry_group(name) + + Args: + name (str): Required. The name of the entry group. For example, + ``projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}``. + read_mask (Union[dict, ~google.cloud.datacatalog_v1beta1.types.FieldMask]): The fields to return. If not set or empty, all fields are returned. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.datacatalog_v1beta1.types.FieldMask` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.datacatalog_v1beta1.types.EntryGroup` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
+ if "get_entry_group" not in self._inner_api_calls: + self._inner_api_calls[ + "get_entry_group" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.get_entry_group, + default_retry=self._method_configs["GetEntryGroup"].retry, + default_timeout=self._method_configs["GetEntryGroup"].timeout, + client_info=self._client_info, + ) + + request = datacatalog_pb2.GetEntryGroupRequest(name=name, read_mask=read_mask) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("name", name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + return self._inner_api_calls["get_entry_group"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def delete_entry_group( + self, + name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Alpha feature. Deletes an EntryGroup. Only entry groups that do not + contain entries can be deleted. The user should enable the Data Catalog + API in the project identified by the ``name`` parameter (see [Data + Catalog Resource Project] (/data-catalog/docs/concepts/resource-project) + for more information). + + Example: + >>> from google.cloud import datacatalog_v1beta1 + >>> + >>> client = datacatalog_v1beta1.DataCatalogClient() + >>> + >>> name = client.entry_group_path('[PROJECT]', '[LOCATION]', '[ENTRY_GROUP]') + >>> + >>> client.delete_entry_group(name) + + Args: + name (str): Required. The name of the entry group. For example, + ``projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}``. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. 
+ timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "delete_entry_group" not in self._inner_api_calls: + self._inner_api_calls[ + "delete_entry_group" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.delete_entry_group, + default_retry=self._method_configs["DeleteEntryGroup"].retry, + default_timeout=self._method_configs["DeleteEntryGroup"].timeout, + client_info=self._client_info, + ) + + request = datacatalog_pb2.DeleteEntryGroupRequest(name=name) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("name", name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + self._inner_api_calls["delete_entry_group"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def create_entry( + self, + parent, + entry_id, + entry, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Alpha feature. Creates an entry. Currently only entries of 'FILESET' + type can be created. The user should enable the Data Catalog API in the + project identified by the ``parent`` parameter (see [Data Catalog + Resource Project] (/data-catalog/docs/concepts/resource-project) for + more information). 
+ + Example: + >>> from google.cloud import datacatalog_v1beta1 + >>> + >>> client = datacatalog_v1beta1.DataCatalogClient() + >>> + >>> parent = client.entry_group_path('[PROJECT]', '[LOCATION]', '[ENTRY_GROUP]') + >>> + >>> # TODO: Initialize `entry_id`: + >>> entry_id = '' + >>> + >>> # TODO: Initialize `entry`: + >>> entry = {} + >>> + >>> response = client.create_entry(parent, entry_id, entry) + + Args: + parent (str): Required. The name of the entry group this entry is in. Example: + + - projects/{project\_id}/locations/{location}/entryGroups/{entry\_group\_id} + + Note that this Entry and its child resources may not actually be stored + in the location in this name. + entry_id (str): Required. The id of the entry to create. + entry (Union[dict, ~google.cloud.datacatalog_v1beta1.types.Entry]): Required. The entry to create. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.datacatalog_v1beta1.types.Entry` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.datacatalog_v1beta1.types.Entry` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
+ if "create_entry" not in self._inner_api_calls: + self._inner_api_calls[ + "create_entry" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.create_entry, + default_retry=self._method_configs["CreateEntry"].retry, + default_timeout=self._method_configs["CreateEntry"].timeout, + client_info=self._client_info, + ) + + request = datacatalog_pb2.CreateEntryRequest( + parent=parent, entry_id=entry_id, entry=entry + ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("parent", parent)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + return self._inner_api_calls["create_entry"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + def update_entry( self, entry, @@ -399,7 +739,10 @@ def update_entry( metadata=None, ): """ - Updates an existing entry. + Updates an existing entry. The user should enable the Data Catalog API + in the project identified by the ``entry.name`` parameter (see [Data + Catalog Resource Project] (/data-catalog/docs/concepts/resource-project) + for more information). Example: >>> from google.cloud import datacatalog_v1beta1 @@ -412,22 +755,26 @@ def update_entry( >>> response = client.update_entry(entry) Args: - entry (Union[dict, ~google.cloud.datacatalog_v1beta1.types.Entry]): Required. The updated Entry. + entry (Union[dict, ~google.cloud.datacatalog_v1beta1.types.Entry]): Required. The updated entry. The "name" field must be set. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.datacatalog_v1beta1.types.Entry` - update_mask (Union[dict, ~google.cloud.datacatalog_v1beta1.types.FieldMask]): Optional. The fields to update on the entry. If absent or empty, all - modifiable fields are updated. 
+ update_mask (Union[dict, ~google.cloud.datacatalog_v1beta1.types.FieldMask]): The fields to update on the entry. If absent or empty, all modifiable + fields are updated. - Modifiable fields in synced entries: + The following fields are modifiable: - 1. schema (Pub/Sub topics only) + - For entries with type ``DATA_STREAM``: - Modifiable fields in native entries: + - ``schema`` - 1. display\_name - 2. description - 3. schema + - For entries with type ``FILESET`` + + - ``schema`` + - ``display_name`` + - ``description`` + - ``gcs_fileset_spec`` + - ``gcs_fileset_spec.file_patterns`` If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.datacatalog_v1beta1.types.FieldMask` @@ -481,6 +828,78 @@ def update_entry( request, retry=retry, timeout=timeout, metadata=metadata ) + def delete_entry( + self, + name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Alpha feature. Deletes an existing entry. Only entries created through + ``CreateEntry`` method can be deleted. The user should enable the Data + Catalog API in the project identified by the ``name`` parameter (see + [Data Catalog Resource Project] + (/data-catalog/docs/concepts/resource-project) for more information). + + Example: + >>> from google.cloud import datacatalog_v1beta1 + >>> + >>> client = datacatalog_v1beta1.DataCatalogClient() + >>> + >>> name = client.entry_path('[PROJECT]', '[LOCATION]', '[ENTRY_GROUP]', '[ENTRY]') + >>> + >>> client.delete_entry(name) + + Args: + name (str): Required. The name of the entry. Example: + + - projects/{project\_id}/locations/{location}/entryGroups/{entry\_group\_id}/entries/{entry\_id} + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. 
+ timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "delete_entry" not in self._inner_api_calls: + self._inner_api_calls[ + "delete_entry" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.delete_entry, + default_retry=self._method_configs["DeleteEntry"].retry, + default_timeout=self._method_configs["DeleteEntry"].timeout, + client_info=self._client_info, + ) + + request = datacatalog_pb2.DeleteEntryRequest(name=name) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("name", name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + self._inner_api_calls["delete_entry"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + def get_entry( self, name, @@ -501,8 +920,14 @@ def get_entry( >>> response = client.get_entry(name) Args: - name (str): Required. The name of the entry. For example, - "projects/{project\_id}/locations/{location}/entryGroups/{entry\_group\_id}/entries/{entry\_id}". + name (str): Required. The name of the entry. Example: + + - projects/{project\_id}/locations/{location}/entryGroups/{entry\_group\_id}/entries/{entry\_id} + + Entry groups are logical groupings of entries. Currently, users cannot + create/modify entry groups. 
They are created by Data Catalog; they + include ``@bigquery`` for all BigQuery entries, and ``@pubsub`` for all + Cloud Pub/Sub entries. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. @@ -574,26 +999,24 @@ def lookup_entry( Args: linked_resource (str): The full name of the Google Cloud Platform resource the Data Catalog entry represents. See: - https://cloud.google.com/apis/design/resource\_names#full\_resource\_name + https://cloud.google.com/apis/design/resource\_names#full\_resource\_name. Full names are case-sensitive. Examples: - "//bigquery.googleapis.com/projects/projectId/datasets/datasetId/tables/tableId". - "//pubsub.googleapis.com/projects/projectId/topics/topicId" + + - //bigquery.googleapis.com/projects/projectId/datasets/datasetId/tables/tableId + - //pubsub.googleapis.com/projects/projectId/topics/topicId sql_resource (str): The SQL name of the entry. SQL names are case-sensitive. Examples: - .. raw:: html + - ``cloud_pubsub.project_id.topic_id`` + - ``pubsub.project_id.`topic.id.with.dots``` + - ``bigquery.project_id.dataset_id.table_id`` + - ``datacatalog.project_id.location_id.entry_group_id.entry_id`` -
-
-                     <ul>
-                     <li>cloud_pubsub.project_id.topic_id</li>
-                     <li>pubsub.project_id.`topic.id.with.dots`</li>
-                     <li>bigquery.project_id.dataset_id.table_id</li>
-                     <li>datacatalog.project_id.location_id.entry_group_id.entry_id</li>
-                     </ul>
-
- *_ids shoud satisfy the standard SQL rules for identifiers. - https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical + ``*_id``\ s shoud satisfy the standard SQL rules for identifiers. + https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. @@ -647,7 +1070,11 @@ def create_tag_template( metadata=None, ): """ - Creates a tag template. + Creates a tag template. The user should enable the Data Catalog API in + the project identified by the ``parent`` parameter (see `Data Catalog + Resource + Project `__ + for more information). Example: >>> from google.cloud import datacatalog_v1beta1 @@ -666,7 +1093,10 @@ def create_tag_template( Args: parent (str): Required. The name of the project and the location this template is in. - Example: "projects/{project\_id}/locations/{location}". Note that this + Example: + + - projects/{project\_id}/locations/{location} + TagTemplate and its child resources may not actually be stored in the location in this name. tag_template_id (str): Required. The id of the tag template to create. @@ -744,8 +1174,9 @@ def get_tag_template( >>> response = client.get_tag_template(name) Args: - name (str): Required. The name of the tag template. For example, - "projects/{project\_id}/locations/{location}/tagTemplates/{tag\_template\_id}". + name (str): Required. The name of the tag template. Example: + + - projects/{project\_id}/locations/{location}/tagTemplates/{tag\_template\_id} retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. @@ -803,9 +1234,13 @@ def update_tag_template( metadata=None, ): """ - Updates a tag template. This method cannot be used to update the fields of - a template. 
The tag template fields are represented as separate resources - and should be updated using their own create/update/delete methods. + Updates a tag template. This method cannot be used to update the fields + of a template. The tag template fields are represented as separate + resources and should be updated using their own create/update/delete + methods. The user should enable the Data Catalog API in the project + identified by the ``tag_template.name`` parameter (see [Data Catalog + Resource Project] (/data-catalog/docs/concepts/resource-project) for + more information). Example: >>> from google.cloud import datacatalog_v1beta1 @@ -818,19 +1253,17 @@ def update_tag_template( >>> response = client.update_tag_template(tag_template) Args: - tag_template (Union[dict, ~google.cloud.datacatalog_v1beta1.types.TagTemplate]): Required. The template to update. + tag_template (Union[dict, ~google.cloud.datacatalog_v1beta1.types.TagTemplate]): Required. The template to update. The "name" field must be set. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.datacatalog_v1beta1.types.TagTemplate` - update_mask (Union[dict, ~google.cloud.datacatalog_v1beta1.types.FieldMask]): Optional. The field mask specifies the parts of the template to - overwrite. + update_mask (Union[dict, ~google.cloud.datacatalog_v1beta1.types.FieldMask]): The field mask specifies the parts of the template to overwrite. Allowed fields: - - display\_name + - ``display_name`` - If update\_mask is omitted, all of the allowed fields above will be - updated. + If absent or empty, all of the allowed fields above will be updated. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.datacatalog_v1beta1.types.FieldMask` @@ -893,7 +1326,10 @@ def delete_tag_template( metadata=None, ): """ - Deletes a tag template and all tags using the template. + Deletes a tag template and all tags using the template. 
The user should + enable the Data Catalog API in the project identified by the ``name`` + parameter (see [Data Catalog Resource Project] + (/data-catalog/docs/concepts/resource-project) for more information). Example: >>> from google.cloud import datacatalog_v1beta1 @@ -908,11 +1344,12 @@ def delete_tag_template( >>> client.delete_tag_template(name, force) Args: - name (str): Required. The name of the tag template to delete. For example, - "projects/{project\_id}/locations/{location}/tagTemplates/{tag\_template\_id}". - force (bool): Required. Currently, this field must always be set to true. - This confirms the deletion of any possible tags using this template. - force = false will be supported in the future. + name (str): Required. The name of the tag template to delete. Example: + + - projects/{project\_id}/locations/{location}/tagTemplates/{tag\_template\_id} + force (bool): Required. Currently, this field must always be set to ``true``. This + confirms the deletion of any possible tags using this template. + ``force = false`` will be supported in the future. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. @@ -968,7 +1405,11 @@ def create_tag_template_field( metadata=None, ): """ - Creates a field in a tag template. + Creates a field in a tag template. The user should enable the Data + Catalog API in the project identified by the ``parent`` parameter (see + `Data Catalog Resource + Project `__ + for more information). Example: >>> from google.cloud import datacatalog_v1beta1 @@ -987,14 +1428,16 @@ def create_tag_template_field( Args: parent (str): Required. The name of the project this template is in. Example: - "projects/{project\_id}/locations/{location}/tagTemplates/{tag\_template\_id}". 
+ + - projects/{project\_id}/locations/{location}/tagTemplates/{tag\_template\_id} + Note that this TagTemplateField may not actually be stored in the location in this name. - tag_template_field_id (str): Required. The id of the tag template field to create. Field ids can + tag_template_field_id (str): Required. The ID of the tag template field to create. Field ids can contain letters (both uppercase and lowercase), numbers (0-9), - underscores (\_) and dashes (-). Field ids must be at least 1 character - long and at most 128 characters long. Field ids must also be unique to - their template. + underscores (\_) and dashes (-). Field IDs must be at least 1 character + long and at most 128 characters long. Field IDs must also be unique + within their template. tag_template_field (Union[dict, ~google.cloud.datacatalog_v1beta1.types.TagTemplateField]): Required. The tag template field to create. If a dict is provided, it must be of the same form as the protobuf @@ -1061,8 +1504,11 @@ def update_tag_template_field( metadata=None, ): """ - Updates a field in a tag template. This method cannot be used to update the - field type. + Updates a field in a tag template. This method cannot be used to update + the field type. The user should enable the Data Catalog API in the + project identified by the ``name`` parameter (see [Data Catalog Resource + Project] (/data-catalog/docs/concepts/resource-project) for more + information). Example: >>> from google.cloud import datacatalog_v1beta1 @@ -1077,20 +1523,21 @@ def update_tag_template_field( >>> response = client.update_tag_template_field(name, tag_template_field) Args: - name (str): Required. The name of the tag template field. For example, - "projects/{project\_id}/locations/{location}/tagTemplates/{tag\_template\_id}/fields/{tag\_template\_field\_id}". + name (str): Required. The name of the tag template field. 
Example: + + - projects/{project\_id}/locations/{location}/tagTemplates/{tag\_template\_id}/fields/{tag\_template\_field\_id} tag_template_field (Union[dict, ~google.cloud.datacatalog_v1beta1.types.TagTemplateField]): Required. The template to update. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.datacatalog_v1beta1.types.TagTemplateField` - update_mask (Union[dict, ~google.cloud.datacatalog_v1beta1.types.FieldMask]): Optional. The field mask specifies the parts of the template to - overwrite. Allowed fields: + update_mask (Union[dict, ~google.cloud.datacatalog_v1beta1.types.FieldMask]): The field mask specifies the parts of the template to be updated. + Allowed fields: - - display\_name - - type.enum\_type + - ``display_name`` + - ``type.enum_type`` - If update\_mask is omitted, all of the allowed fields above will be - updated. + If ``update_mask`` is not set or empty, all of the allowed fields above + will be updated. When updating an enum type, the provided values will be merged with the existing values. Therefore, enum values can only be added, existing enum @@ -1157,7 +1604,11 @@ def rename_tag_template_field( metadata=None, ): """ - Renames a field in a tag template. + Renames a field in a tag template. The user should enable the Data + Catalog API in the project identified by the ``name`` parameter (see + `Data Catalog Resource + Project `__ + for more information). Example: >>> from google.cloud import datacatalog_v1beta1 @@ -1172,10 +1623,11 @@ def rename_tag_template_field( >>> response = client.rename_tag_template_field(name, new_tag_template_field_id) Args: - name (str): Required. The name of the tag template. For example, - "projects/{project\_id}/locations/{location}/tagTemplates/{tag\_template\_id}/fields/{tag\_template\_field\_id}". + name (str): Required. The name of the tag template. 
Example: + + - projects/{project\_id}/locations/{location}/tagTemplates/{tag\_template\_id}/fields/{tag\_template\_field\_id} new_tag_template_field_id (str): Required. The new ID of this tag template field. For example, - "my\_new\_field". + ``my_new_field``. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. @@ -1235,7 +1687,10 @@ def delete_tag_template_field( metadata=None, ): """ - Deletes a field in a tag template and all uses of that field. + Deletes a field in a tag template and all uses of that field. The user + should enable the Data Catalog API in the project identified by the + ``name`` parameter (see [Data Catalog Resource Project] + (/data-catalog/docs/concepts/resource-project) for more information). Example: >>> from google.cloud import datacatalog_v1beta1 @@ -1250,11 +1705,12 @@ def delete_tag_template_field( >>> client.delete_tag_template_field(name, force) Args: - name (str): Required. The name of the tag template field to delete. For example, - "projects/{project\_id}/locations/{location}/tagTemplates/{tag\_template\_id}/fields/{tag\_template\_field\_id}". - force (bool): Required. Currently, this field must always be set to true. - This confirms the deletion of this field from any tags using this field. - force = false will be supported in the future. + name (str): Required. The name of the tag template field to delete. Example: + + - projects/{project\_id}/locations/{location}/tagTemplates/{tag\_template\_id}/fields/{tag\_template\_field\_id} + force (bool): Required. Currently, this field must always be set to ``true``. This + confirms the deletion of this field from any tags using this field. + ``force = false`` will be supported in the future. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. 
@@ -1309,7 +1765,12 @@ def create_tag( metadata=None, ): """ - Creates a tag on an ``Entry``. + Creates a tag on an ``Entry``. Note: The project identified by the + ``parent`` parameter for the + `tag `__ + and the `tag + template `__ + used to create the tag must be from the same organization. Example: >>> from google.cloud import datacatalog_v1beta1 @@ -1325,8 +1786,10 @@ def create_tag( Args: parent (str): Required. The name of the resource to attach this tag to. Tags can be - attached to Entries. (example: - "projects/{project\_id}/locations/{location}/entryGroups/{entry\_group\_id}/entries/{entry\_id}"). + attached to Entries. Example: + + - projects/{project\_id}/locations/{location}/entryGroups/{entry\_group\_id}/entries/{entry\_id} + Note that this Tag and its child resources may not actually be stored in the location in this name. tag (Union[dict, ~google.cloud.datacatalog_v1beta1.types.Tag]): Required. The tag to create. @@ -1403,13 +1866,13 @@ def update_tag( >>> response = client.update_tag(tag) Args: - tag (Union[dict, ~google.cloud.datacatalog_v1beta1.types.Tag]): Required. The updated tag. + tag (Union[dict, ~google.cloud.datacatalog_v1beta1.types.Tag]): Required. The updated tag. The "name" field must be set. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.datacatalog_v1beta1.types.Tag` - update_mask (Union[dict, ~google.cloud.datacatalog_v1beta1.types.FieldMask]): Optional. The fields to update on the Tag. If absent or empty, all - modifiable fields are updated. Currently the only modifiable field is - the field ``fields``. + update_mask (Union[dict, ~google.cloud.datacatalog_v1beta1.types.FieldMask]): The fields to update on the Tag. If absent or empty, all modifiable + fields are updated. Currently the only modifiable field is the field + ``fields``. 
If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.datacatalog_v1beta1.types.FieldMask` @@ -1481,8 +1944,9 @@ def delete_tag( >>> client.delete_tag(name) Args: - name (str): Required. The name of the tag to delete. For example, - "projects/{project\_id}/locations/{location}/entryGroups/{entry\_group\_id}/entries/{entry\_id}/tags/{tag\_id}". + name (str): Required. The name of the tag to delete. Example: + + - projects/{project\_id}/locations/{location}/entryGroups/{entry\_group\_id}/entries/{entry\_id}/tags/{tag\_id} retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. @@ -1642,13 +2106,19 @@ def set_iam_policy( Sets the access control policy for a resource. Replaces any existing policy. Supported resources are: - - Tag templates. Note, this method cannot be used to manage policies - for BigQuery, Cloud Pub/Sub and any external Google Cloud Platform + - Tag templates. + - Entries. + - Entry groups. Note, this method cannot be used to manage policies for + BigQuery, Cloud Pub/Sub and any external Google Cloud Platform resources synced to Cloud Data Catalog. Callers must have following Google IAM permission - ``datacatalog.tagTemplates.setIamPolicy`` to set policies on tag - templates. + + - ``datacatalog.tagTemplates.setIamPolicy`` to set policies on tag + templates. + - ``datacatalog.entries.setIamPolicy`` to set policies on entries. + - ``datacatalog.entryGroups.setIamPolicy`` to set policies on entry + groups. Example: >>> from google.cloud import datacatalog_v1beta1 @@ -1735,13 +2205,19 @@ def get_iam_policy( Supported resources are: - - Tag templates. Note, this method cannot be used to manage policies - for BigQuery, Cloud Pub/Sub and any external Google Cloud Platform + - Tag templates. + - Entries. + - Entry groups. 
Note, this method cannot be used to manage policies for + BigQuery, Cloud Pub/Sub and any external Google Cloud Platform resources synced to Cloud Data Catalog. Callers must have following Google IAM permission - ``datacatalog.tagTemplates.getIamPolicy`` to get policies on tag - templates. + + - ``datacatalog.tagTemplates.getIamPolicy`` to get policies on tag + templates. + - ``datacatalog.entries.getIamPolicy`` to get policies on entries. + - ``datacatalog.entryGroups.getIamPolicy`` to get policies on entry + groups. Example: >>> from google.cloud import datacatalog_v1beta1 @@ -1823,10 +2299,12 @@ def test_iam_permissions( exist, an empty set of permissions is returned (We don't return a ``NOT_FOUND`` error). - Supported resource are: + Supported resources are: - - tag templates. Note, this method cannot be used to manage policies - for BigQuery, Cloud Pub/Sub and any external Google Cloud Platform + - Tag templates. + - Entries. + - Entry groups. Note, this method cannot be used to manage policies for + BigQuery, Cloud Pub/Sub and any external Google Cloud Platform resources synced to Cloud Data Catalog. 
A caller is not required to have Google IAM permission to make this diff --git a/datacatalog/google/cloud/datacatalog_v1beta1/gapic/data_catalog_client_config.py b/datacatalog/google/cloud/datacatalog_v1beta1/gapic/data_catalog_client_config.py index a1bda46164f0..065f9970864c 100644 --- a/datacatalog/google/cloud/datacatalog_v1beta1/gapic/data_catalog_client_config.py +++ b/datacatalog/google/cloud/datacatalog_v1beta1/gapic/data_catalog_client_config.py @@ -22,11 +22,36 @@ "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, + "CreateEntryGroup": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + }, + "GetEntryGroup": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, + "DeleteEntryGroup": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, + "CreateEntry": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + }, "UpdateEntry": { "timeout_millis": 60000, "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, + "DeleteEntry": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, "GetEntry": { "timeout_millis": 60000, "retry_codes_name": "idempotent", diff --git a/datacatalog/google/cloud/datacatalog_v1beta1/gapic/enums.py b/datacatalog/google/cloud/datacatalog_v1beta1/gapic/enums.py index 954b282e5021..de76fc9f8a3b 100644 --- a/datacatalog/google/cloud/datacatalog_v1beta1/gapic/enums.py +++ b/datacatalog/google/cloud/datacatalog_v1beta1/gapic/enums.py @@ -21,19 +21,24 @@ class EntryType(enum.IntEnum): """ - Entry resources in Data Catalog can be of different types e.g. BigQuery - Table entry is of type 'TABLE'. This enum describes all the possible types - Data Catalog contains. + Entry resources in Data Catalog can be of different types e.g. 
a + BigQuery Table entry is of type ``TABLE``. This enum describes all the + possible types Data Catalog contains. Attributes: ENTRY_TYPE_UNSPECIFIED (int): Default unknown type - TABLE (int): The type of entry that has a GoogleSQL schema, including logical views. - DATA_STREAM (int): An entry type which is used for streaming entries. Example - Pub/Sub. + TABLE (int): Output only. The type of entry that has a GoogleSQL schema, including + logical views. + DATA_STREAM (int): Output only. An entry type which is used for streaming entries. Example: + Cloud Pub/Sub topic. + FILESET (int): Alpha feature. An entry type which is a set of files or objects. Example: + Cloud Storage fileset. """ ENTRY_TYPE_UNSPECIFIED = 0 TABLE = 2 DATA_STREAM = 3 + FILESET = 4 class SearchResultType(enum.IntEnum): diff --git a/datacatalog/google/cloud/datacatalog_v1beta1/gapic/transports/data_catalog_grpc_transport.py b/datacatalog/google/cloud/datacatalog_v1beta1/gapic/transports/data_catalog_grpc_transport.py index 5f1e9639e7a6..2cbeb340f133 100644 --- a/datacatalog/google/cloud/datacatalog_v1beta1/gapic/transports/data_catalog_grpc_transport.py +++ b/datacatalog/google/cloud/datacatalog_v1beta1/gapic/transports/data_catalog_grpc_transport.py @@ -117,7 +117,7 @@ def search_catalog(self): This is a custom method (https://cloud.google.com/apis/design/custom\_methods) and does not return the complete resource, only the resource identifier and high - level fields. Clients can subsequentally call Get methods. + level fields. Clients can subsequentally call ``Get`` methods. Note that searches do not have full recall. There may be results that match your query but are not returned, even in subsequent pages of @@ -135,11 +135,78 @@ def search_catalog(self): """ return self._stubs["data_catalog_stub"].SearchCatalog + @property + def create_entry_group(self): + """Return the gRPC stub for :meth:`DataCatalogClient.create_entry_group`. + + Alpha feature. Creates an EntryGroup. 
The user should enable the Data + Catalog API in the project identified by the ``parent`` parameter (see + [Data Catalog Resource Project] + (/data-catalog/docs/concepts/resource-project) for more information). + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["data_catalog_stub"].CreateEntryGroup + + @property + def get_entry_group(self): + """Return the gRPC stub for :meth:`DataCatalogClient.get_entry_group`. + + Alpha feature. + Gets an EntryGroup. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["data_catalog_stub"].GetEntryGroup + + @property + def delete_entry_group(self): + """Return the gRPC stub for :meth:`DataCatalogClient.delete_entry_group`. + + Alpha feature. Deletes an EntryGroup. Only entry groups that do not + contain entries can be deleted. The user should enable the Data Catalog + API in the project identified by the ``name`` parameter (see [Data + Catalog Resource Project] (/data-catalog/docs/concepts/resource-project) + for more information). + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["data_catalog_stub"].DeleteEntryGroup + + @property + def create_entry(self): + """Return the gRPC stub for :meth:`DataCatalogClient.create_entry`. + + Alpha feature. Creates an entry. Currently only entries of 'FILESET' + type can be created. The user should enable the Data Catalog API in the + project identified by the ``parent`` parameter (see [Data Catalog + Resource Project] (/data-catalog/docs/concepts/resource-project) for + more information). + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. 
+ """ + return self._stubs["data_catalog_stub"].CreateEntry + @property def update_entry(self): """Return the gRPC stub for :meth:`DataCatalogClient.update_entry`. - Updates an existing entry. + Updates an existing entry. The user should enable the Data Catalog API + in the project identified by the ``entry.name`` parameter (see [Data + Catalog Resource Project] (/data-catalog/docs/concepts/resource-project) + for more information). Returns: Callable: A callable which accepts the appropriate @@ -148,6 +215,23 @@ def update_entry(self): """ return self._stubs["data_catalog_stub"].UpdateEntry + @property + def delete_entry(self): + """Return the gRPC stub for :meth:`DataCatalogClient.delete_entry`. + + Alpha feature. Deletes an existing entry. Only entries created through + ``CreateEntry`` method can be deleted. The user should enable the Data + Catalog API in the project identified by the ``name`` parameter (see + [Data Catalog Resource Project] + (/data-catalog/docs/concepts/resource-project) for more information). + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["data_catalog_stub"].DeleteEntry + @property def get_entry(self): """Return the gRPC stub for :meth:`DataCatalogClient.get_entry`. @@ -180,7 +264,11 @@ def lookup_entry(self): def create_tag_template(self): """Return the gRPC stub for :meth:`DataCatalogClient.create_tag_template`. - Creates a tag template. + Creates a tag template. The user should enable the Data Catalog API in + the project identified by the ``parent`` parameter (see `Data Catalog + Resource + Project `__ + for more information). Returns: Callable: A callable which accepts the appropriate @@ -206,9 +294,13 @@ def get_tag_template(self): def update_tag_template(self): """Return the gRPC stub for :meth:`DataCatalogClient.update_tag_template`. - Updates a tag template. 
This method cannot be used to update the fields of - a template. The tag template fields are represented as separate resources - and should be updated using their own create/update/delete methods. + Updates a tag template. This method cannot be used to update the fields + of a template. The tag template fields are represented as separate + resources and should be updated using their own create/update/delete + methods. The user should enable the Data Catalog API in the project + identified by the ``tag_template.name`` parameter (see [Data Catalog + Resource Project] (/data-catalog/docs/concepts/resource-project) for + more information). Returns: Callable: A callable which accepts the appropriate @@ -221,7 +313,10 @@ def update_tag_template(self): def delete_tag_template(self): """Return the gRPC stub for :meth:`DataCatalogClient.delete_tag_template`. - Deletes a tag template and all tags using the template. + Deletes a tag template and all tags using the template. The user should + enable the Data Catalog API in the project identified by the ``name`` + parameter (see [Data Catalog Resource Project] + (/data-catalog/docs/concepts/resource-project) for more information). Returns: Callable: A callable which accepts the appropriate @@ -234,7 +329,11 @@ def delete_tag_template(self): def create_tag_template_field(self): """Return the gRPC stub for :meth:`DataCatalogClient.create_tag_template_field`. - Creates a field in a tag template. + Creates a field in a tag template. The user should enable the Data + Catalog API in the project identified by the ``parent`` parameter (see + `Data Catalog Resource + Project `__ + for more information). Returns: Callable: A callable which accepts the appropriate @@ -247,8 +346,11 @@ def create_tag_template_field(self): def update_tag_template_field(self): """Return the gRPC stub for :meth:`DataCatalogClient.update_tag_template_field`. - Updates a field in a tag template. This method cannot be used to update the - field type. 
+ Updates a field in a tag template. This method cannot be used to update + the field type. The user should enable the Data Catalog API in the + project identified by the ``name`` parameter (see [Data Catalog Resource + Project] (/data-catalog/docs/concepts/resource-project) for more + information). Returns: Callable: A callable which accepts the appropriate @@ -261,7 +363,11 @@ def update_tag_template_field(self): def rename_tag_template_field(self): """Return the gRPC stub for :meth:`DataCatalogClient.rename_tag_template_field`. - Renames a field in a tag template. + Renames a field in a tag template. The user should enable the Data + Catalog API in the project identified by the ``name`` parameter (see + `Data Catalog Resource + Project `__ + for more information). Returns: Callable: A callable which accepts the appropriate @@ -274,7 +380,10 @@ def rename_tag_template_field(self): def delete_tag_template_field(self): """Return the gRPC stub for :meth:`DataCatalogClient.delete_tag_template_field`. - Deletes a field in a tag template and all uses of that field. + Deletes a field in a tag template and all uses of that field. The user + should enable the Data Catalog API in the project identified by the + ``name`` parameter (see [Data Catalog Resource Project] + (/data-catalog/docs/concepts/resource-project) for more information). Returns: Callable: A callable which accepts the appropriate @@ -287,7 +396,12 @@ def delete_tag_template_field(self): def create_tag(self): """Return the gRPC stub for :meth:`DataCatalogClient.create_tag`. - Creates a tag on an ``Entry``. + Creates a tag on an ``Entry``. Note: The project identified by the + ``parent`` parameter for the + `tag `__ + and the `tag + template `__ + used to create the tag must be from the same organization. Returns: Callable: A callable which accepts the appropriate @@ -342,13 +456,19 @@ def set_iam_policy(self): Sets the access control policy for a resource. Replaces any existing policy. 
Supported resources are: - - Tag templates. Note, this method cannot be used to manage policies - for BigQuery, Cloud Pub/Sub and any external Google Cloud Platform + - Tag templates. + - Entries. + - Entry groups. Note, this method cannot be used to manage policies for + BigQuery, Cloud Pub/Sub and any external Google Cloud Platform resources synced to Cloud Data Catalog. Callers must have following Google IAM permission - ``datacatalog.tagTemplates.setIamPolicy`` to set policies on tag - templates. + + - ``datacatalog.tagTemplates.setIamPolicy`` to set policies on tag + templates. + - ``datacatalog.entries.setIamPolicy`` to set policies on entries. + - ``datacatalog.entryGroups.setIamPolicy`` to set policies on entry + groups. Returns: Callable: A callable which accepts the appropriate @@ -367,13 +487,19 @@ def get_iam_policy(self): Supported resources are: - - Tag templates. Note, this method cannot be used to manage policies - for BigQuery, Cloud Pub/Sub and any external Google Cloud Platform + - Tag templates. + - Entries. + - Entry groups. Note, this method cannot be used to manage policies for + BigQuery, Cloud Pub/Sub and any external Google Cloud Platform resources synced to Cloud Data Catalog. Callers must have following Google IAM permission - ``datacatalog.tagTemplates.getIamPolicy`` to get policies on tag - templates. + + - ``datacatalog.tagTemplates.getIamPolicy`` to get policies on tag + templates. + - ``datacatalog.entries.getIamPolicy`` to get policies on entries. + - ``datacatalog.entryGroups.getIamPolicy`` to get policies on entry + groups. Returns: Callable: A callable which accepts the appropriate @@ -390,10 +516,12 @@ def test_iam_permissions(self): exist, an empty set of permissions is returned (We don't return a ``NOT_FOUND`` error). - Supported resource are: + Supported resources are: - - tag templates. Note, this method cannot be used to manage policies - for BigQuery, Cloud Pub/Sub and any external Google Cloud Platform + - Tag templates. 
+ - Entries. + - Entry groups. Note, this method cannot be used to manage policies for + BigQuery, Cloud Pub/Sub and any external Google Cloud Platform resources synced to Cloud Data Catalog. A caller is not required to have Google IAM permission to make this diff --git a/datacatalog/google/cloud/datacatalog_v1beta1/proto/datacatalog.proto b/datacatalog/google/cloud/datacatalog_v1beta1/proto/datacatalog.proto index e89e7ad62671..8b67be1a0d29 100644 --- a/datacatalog/google/cloud/datacatalog_v1beta1/proto/datacatalog.proto +++ b/datacatalog/google/cloud/datacatalog_v1beta1/proto/datacatalog.proto @@ -18,6 +18,10 @@ syntax = "proto3"; package google.cloud.datacatalog.v1beta1; import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; +import "google/cloud/datacatalog/v1beta1/gcs_fileset_spec.proto"; import "google/cloud/datacatalog/v1beta1/schema.proto"; import "google/cloud/datacatalog/v1beta1/search.proto"; import "google/cloud/datacatalog/v1beta1/table_spec.proto"; @@ -27,7 +31,6 @@ import "google/iam/v1/iam_policy.proto"; import "google/iam/v1/policy.proto"; import "google/protobuf/empty.proto"; import "google/protobuf/field_mask.proto"; -import "google/api/client.proto"; option cc_enable_arenas = true; option go_package = "google.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1;datacatalog"; @@ -38,7 +41,8 @@ option java_package = "com.google.cloud.datacatalog"; // their data. service DataCatalog { option (google.api.default_host) = "datacatalog.googleapis.com"; - option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; + option (google.api.oauth_scopes) = + "https://www.googleapis.com/auth/cloud-platform"; // Searches Data Catalog for multiple resources like entries, tags that // match a query. 
@@ -46,7 +50,7 @@ service DataCatalog { // This is a custom method // (https://cloud.google.com/apis/design/custom_methods) and does not return // the complete resource, only the resource identifier and high level - // fields. Clients can subsequentally call Get methods. + // fields. Clients can subsequentally call `Get` methods. // // Note that searches do not have full recall. There may be results that match // your query but are not returned, even in subsequent pages of results. These @@ -60,14 +64,83 @@ service DataCatalog { post: "/v1beta1/catalog:search" body: "*" }; + option (google.api.method_signature) = "scope,query"; + } + + // Alpha feature. + // Creates an EntryGroup. + // The user should enable the Data Catalog API in the project identified by + // the `parent` parameter (see [Data Catalog Resource Project] + // (/data-catalog/docs/concepts/resource-project) for more information). + rpc CreateEntryGroup(CreateEntryGroupRequest) returns (EntryGroup) { + option (google.api.http) = { + post: "/v1beta1/{parent=projects/*/locations/*}/entryGroups" + body: "entry_group" + }; + option (google.api.method_signature) = "parent,entry_group_id,entry_group"; + } + + // Alpha feature. + // Gets an EntryGroup. + rpc GetEntryGroup(GetEntryGroupRequest) returns (EntryGroup) { + option (google.api.http) = { + get: "/v1beta1/{name=projects/*/locations/*/entryGroups/*}" + }; + option (google.api.method_signature) = "name"; + option (google.api.method_signature) = "name,read_mask"; + } + + // Alpha feature. + // Deletes an EntryGroup. Only entry groups that do not contain entries can be + // deleted. The user should enable the Data Catalog API in the project + // identified by the `name` parameter (see [Data Catalog Resource Project] + // (/data-catalog/docs/concepts/resource-project) for more information). 
+ rpc DeleteEntryGroup(DeleteEntryGroupRequest) + returns (google.protobuf.Empty) { + option (google.api.http) = { + delete: "/v1beta1/{name=projects/*/locations/*/entryGroups/*}" + }; + option (google.api.method_signature) = "name"; + } + + // Alpha feature. + // Creates an entry. Currently only entries of 'FILESET' type can be created. + // The user should enable the Data Catalog API in the project identified by + // the `parent` parameter (see [Data Catalog Resource Project] + // (/data-catalog/docs/concepts/resource-project) for more information). + rpc CreateEntry(CreateEntryRequest) returns (Entry) { + option (google.api.http) = { + post: "/v1beta1/{parent=projects/*/locations/*/entryGroups/*}/entries" + body: "entry" + }; + option (google.api.method_signature) = "parent,entry_id,entry"; } // Updates an existing entry. + // The user should enable the Data Catalog API in the project identified by + // the `entry.name` parameter (see [Data Catalog Resource Project] + // (/data-catalog/docs/concepts/resource-project) for more information). rpc UpdateEntry(UpdateEntryRequest) returns (Entry) { option (google.api.http) = { patch: "/v1beta1/{entry.name=projects/*/locations/*/entryGroups/*/entries/*}" body: "entry" }; + option (google.api.method_signature) = "entry"; + option (google.api.method_signature) = "entry,update_mask"; + } + + // Alpha feature. + // Deletes an existing entry. Only entries created through + // [CreateEntry][google.cloud.datacatalog.v1beta1.DataCatalog.CreateEntry] + // method can be deleted. + // The user should enable the Data Catalog API in the project identified by + // the `name` parameter (see [Data Catalog Resource Project] + // (/data-catalog/docs/concepts/resource-project) for more information). 
+ rpc DeleteEntry(DeleteEntryRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + delete: "/v1beta1/{name=projects/*/locations/*/entryGroups/*/entries/*}" + }; + option (google.api.method_signature) = "name"; } // Gets an entry. @@ -75,6 +148,7 @@ service DataCatalog { option (google.api.http) = { get: "/v1beta1/{name=projects/*/locations/*/entryGroups/*/entries/*}" }; + option (google.api.method_signature) = "name"; } // Get an entry by target resource name. This method allows clients to use @@ -86,12 +160,17 @@ service DataCatalog { }; } - // Creates a tag template. + // Creates a tag template. The user should enable the Data Catalog API in + // the project identified by the `parent` parameter (see [Data Catalog + // Resource Project](/data-catalog/docs/concepts/resource-project) for more + // information). rpc CreateTagTemplate(CreateTagTemplateRequest) returns (TagTemplate) { option (google.api.http) = { post: "/v1beta1/{parent=projects/*/locations/*}/tagTemplates" body: "tag_template" }; + option (google.api.method_signature) = + "parent,tag_template_id,tag_template"; } // Gets a tag template. @@ -99,63 +178,104 @@ service DataCatalog { option (google.api.http) = { get: "/v1beta1/{name=projects/*/locations/*/tagTemplates/*}" }; + option (google.api.method_signature) = "name"; } // Updates a tag template. This method cannot be used to update the fields of // a template. The tag template fields are represented as separate resources // and should be updated using their own create/update/delete methods. + // The user should enable the Data Catalog API in the project identified by + // the `tag_template.name` parameter (see [Data Catalog Resource Project] + // (/data-catalog/docs/concepts/resource-project) for more information). 
rpc UpdateTagTemplate(UpdateTagTemplateRequest) returns (TagTemplate) { option (google.api.http) = { patch: "/v1beta1/{tag_template.name=projects/*/locations/*/tagTemplates/*}" body: "tag_template" }; + option (google.api.method_signature) = "tag_template"; + option (google.api.method_signature) = "tag_template,update_mask"; } // Deletes a tag template and all tags using the template. - rpc DeleteTagTemplate(DeleteTagTemplateRequest) returns (google.protobuf.Empty) { + // The user should enable the Data Catalog API in the project identified by + // the `name` parameter (see [Data Catalog Resource Project] + // (/data-catalog/docs/concepts/resource-project) for more information). + rpc DeleteTagTemplate(DeleteTagTemplateRequest) + returns (google.protobuf.Empty) { option (google.api.http) = { delete: "/v1beta1/{name=projects/*/locations/*/tagTemplates/*}" }; + option (google.api.method_signature) = "name,force"; } - // Creates a field in a tag template. - rpc CreateTagTemplateField(CreateTagTemplateFieldRequest) returns (TagTemplateField) { + // Creates a field in a tag template. The user should enable the Data Catalog + // API in the project identified by the `parent` parameter (see + // [Data Catalog Resource + // Project](/data-catalog/docs/concepts/resource-project) for more + // information). + rpc CreateTagTemplateField(CreateTagTemplateFieldRequest) + returns (TagTemplateField) { option (google.api.http) = { post: "/v1beta1/{parent=projects/*/locations/*/tagTemplates/*}/fields" body: "tag_template_field" }; + option (google.api.method_signature) = + "parent,tag_template_field_id,tag_template_field"; } // Updates a field in a tag template. This method cannot be used to update the - // field type. - rpc UpdateTagTemplateField(UpdateTagTemplateFieldRequest) returns (TagTemplateField) { + // field type. 
The user should enable the Data Catalog API in the project + // identified by the `name` parameter (see [Data Catalog Resource Project] + // (/data-catalog/docs/concepts/resource-project) for more information). + rpc UpdateTagTemplateField(UpdateTagTemplateFieldRequest) + returns (TagTemplateField) { option (google.api.http) = { patch: "/v1beta1/{name=projects/*/locations/*/tagTemplates/*/fields/*}" body: "tag_template_field" }; + option (google.api.method_signature) = "name,tag_template_field"; + option (google.api.method_signature) = + "name,tag_template_field,update_mask"; } - // Renames a field in a tag template. - rpc RenameTagTemplateField(RenameTagTemplateFieldRequest) returns (TagTemplateField) { + // Renames a field in a tag template. The user should enable the Data Catalog + // API in the project identified by the `name` parameter (see [Data Catalog + // Resource Project](/data-catalog/docs/concepts/resource-project) for more + // information). + rpc RenameTagTemplateField(RenameTagTemplateFieldRequest) + returns (TagTemplateField) { option (google.api.http) = { post: "/v1beta1/{name=projects/*/locations/*/tagTemplates/*/fields/*}:rename" body: "*" }; + option (google.api.method_signature) = "name,new_tag_template_field_id"; } // Deletes a field in a tag template and all uses of that field. - rpc DeleteTagTemplateField(DeleteTagTemplateFieldRequest) returns (google.protobuf.Empty) { + // The user should enable the Data Catalog API in the project identified by + // the `name` parameter (see [Data Catalog Resource Project] + // (/data-catalog/docs/concepts/resource-project) for more information). + rpc DeleteTagTemplateField(DeleteTagTemplateFieldRequest) + returns (google.protobuf.Empty) { option (google.api.http) = { delete: "/v1beta1/{name=projects/*/locations/*/tagTemplates/*/fields/*}" }; + option (google.api.method_signature) = "name,force"; } // Creates a tag on an [Entry][google.cloud.datacatalog.v1beta1.Entry]. 
+ // Note: The project identified by the `parent` parameter for the + // [tag](/data-catalog/docs/reference/rest/v1beta1/projects.locations.entryGroups.entries.tags/create#path-parameters) + // and the + // [tag + // template](/data-catalog/docs/reference/rest/v1beta1/projects.locations.tagTemplates/create#path-parameters) + // used to create the tag must be from the same organization. rpc CreateTag(CreateTagRequest) returns (Tag) { option (google.api.http) = { post: "/v1beta1/{parent=projects/*/locations/*/entryGroups/*/entries/*}/tags" body: "tag" }; + option (google.api.method_signature) = "parent,tag"; } // Updates an existing tag. @@ -164,6 +284,8 @@ service DataCatalog { patch: "/v1beta1/{tag.name=projects/*/locations/*/entryGroups/*/entries/*/tags/*}" body: "tag" }; + option (google.api.method_signature) = "tag"; + option (google.api.method_signature) = "tag,update_mask"; } // Deletes a tag. @@ -171,6 +293,7 @@ service DataCatalog { option (google.api.http) = { delete: "/v1beta1/{name=projects/*/locations/*/entryGroups/*/entries/*/tags/*}" }; + option (google.api.method_signature) = "name"; } // Lists the tags on an [Entry][google.cloud.datacatalog.v1beta1.Entry]. @@ -178,22 +301,37 @@ service DataCatalog { option (google.api.http) = { get: "/v1beta1/{parent=projects/*/locations/*/entryGroups/*/entries/*}/tags" }; + option (google.api.method_signature) = "parent"; } // Sets the access control policy for a resource. Replaces any existing // policy. // Supported resources are: // - Tag templates. + // - Entries. + // - Entry groups. // Note, this method cannot be used to manage policies for BigQuery, Cloud // Pub/Sub and any external Google Cloud Platform resources synced to Cloud // Data Catalog. // // Callers must have following Google IAM permission - // `datacatalog.tagTemplates.setIamPolicy` to set policies on tag templates. 
- rpc SetIamPolicy(google.iam.v1.SetIamPolicyRequest) returns (google.iam.v1.Policy) { + // - `datacatalog.tagTemplates.setIamPolicy` to set policies on tag + // templates. + // - `datacatalog.entries.setIamPolicy` to set policies on entries. + // - `datacatalog.entryGroups.setIamPolicy` to set policies on entry groups. + rpc SetIamPolicy(google.iam.v1.SetIamPolicyRequest) + returns (google.iam.v1.Policy) { option (google.api.http) = { post: "/v1beta1/{resource=projects/*/locations/*/tagTemplates/*}:setIamPolicy" body: "*" + additional_bindings { + post: "/v1beta1/{resource=projects/*/locations/*/entryGroups/*}:setIamPolicy" + body: "*" + } + additional_bindings { + post: "/v1beta1/{resource=projects/*/locations/*/entryGroups/*/entries/*}:setIamPolicy" + body: "*" + } }; } @@ -203,16 +341,30 @@ service DataCatalog { // // Supported resources are: // - Tag templates. + // - Entries. + // - Entry groups. // Note, this method cannot be used to manage policies for BigQuery, Cloud // Pub/Sub and any external Google Cloud Platform resources synced to Cloud // Data Catalog. // // Callers must have following Google IAM permission - // `datacatalog.tagTemplates.getIamPolicy` to get policies on tag templates. - rpc GetIamPolicy(google.iam.v1.GetIamPolicyRequest) returns (google.iam.v1.Policy) { + // - `datacatalog.tagTemplates.getIamPolicy` to get policies on tag + // templates. + // - `datacatalog.entries.getIamPolicy` to get policies on entries. + // - `datacatalog.entryGroups.getIamPolicy` to get policies on entry groups. 
+ rpc GetIamPolicy(google.iam.v1.GetIamPolicyRequest) + returns (google.iam.v1.Policy) { option (google.api.http) = { post: "/v1beta1/{resource=projects/*/locations/*/tagTemplates/*}:getIamPolicy" body: "*" + additional_bindings { + post: "/v1beta1/{resource=projects/*/locations/*/entryGroups/*}:getIamPolicy" + body: "*" + } + additional_bindings { + post: "/v1beta1/{resource=projects/*/locations/*/entryGroups/*/entries/*}:getIamPolicy" + body: "*" + } }; } @@ -220,18 +372,29 @@ service DataCatalog { // If the resource does not exist, an empty set of permissions is returned // (We don't return a `NOT_FOUND` error). // - // Supported resource are: - // - tag templates. + // Supported resources are: + // - Tag templates. + // - Entries. + // - Entry groups. // Note, this method cannot be used to manage policies for BigQuery, Cloud // Pub/Sub and any external Google Cloud Platform resources synced to Cloud // Data Catalog. // // A caller is not required to have Google IAM permission to make this // request. - rpc TestIamPermissions(google.iam.v1.TestIamPermissionsRequest) returns (google.iam.v1.TestIamPermissionsResponse) { + rpc TestIamPermissions(google.iam.v1.TestIamPermissionsRequest) + returns (google.iam.v1.TestIamPermissionsResponse) { option (google.api.http) = { post: "/v1beta1/{resource=projects/*/locations/*/tagTemplates/*}:testIamPermissions" body: "*" + additional_bindings { + post: "/v1beta1/{resource=projects/*/locations/*/entryGroups/*}:testIamPermissions" + body: "*" + } + additional_bindings { + post: "/v1beta1/{resource=projects/*/locations/*/entryGroups/*/entries/*}:testIamPermissions" + body: "*" + } }; } } @@ -242,21 +405,21 @@ message SearchCatalogRequest { message Scope { // Data Catalog tries to automatically choose the right corpus of data to // search through. You can ensure an organization is included by adding it - // to "include_org_ids". You can ensure a project's org is included with - // "include_project_ids". 
You must specify at least one organization - // using "include_org_ids" or "include_project_ids" in all search requests. + // to `include_org_ids`. You can ensure a project's org is included with + // `include_project_ids`. You must specify at least one organization + // using `include_org_ids` or `include_project_ids` in all search requests. // // List of organization IDs to search within. To find your organization ID, // follow instructions in - // https://cloud.google.com/resource-manager/docs/creating-managing-organization + // https://cloud.google.com/resource-manager/docs/creating-managing-organization. repeated string include_org_ids = 2; // List of project IDs to search within. To learn more about the // distinction between project names/IDs/numbers, go to - // https://cloud.google.com/docs/overview/#projects + // https://cloud.google.com/docs/overview/#projects. repeated string include_project_ids = 3; - // If true, include Google Cloud Platform (GCP) public datasets in the + // If `true`, include Google Cloud Platform (GCP) public datasets in the // search results. Info on GCP public datasets is available at // https://cloud.google.com/public-datasets/. By default, GCP public // datasets are excluded. @@ -264,7 +427,7 @@ message SearchCatalogRequest { } // Required. The scope of this search request. - Scope scope = 6; + Scope scope = 6 [(google.api.field_behavior) = REQUIRED]; // Required. The query string in search query syntax. The query must be // non-empty. @@ -278,36 +441,37 @@ message SearchCatalogRequest { // Note: Query tokens need to have a minimum of 3 characters for substring // matching to work correctly. See [Data Catalog Search // Syntax](/data-catalog/docs/how-to/search-reference) for more information. - string query = 1; + string query = 1 [(google.api.field_behavior) = REQUIRED]; // Number of results in the search page. If <=0 then defaults to 10. Max limit // for page_size is 1000. Throws an invalid argument for page_size > 1000. 
int32 page_size = 2; - // Optional pagination token returned in an earlier - // [SearchCatalogResponse.next_page_token][google.cloud.datacatalog.v1beta1.DataCatalog.SearchCatalogResponse.next_page_token]; - // indicates that this is a continuation of a prior - // [SearchCatalog][google.cloud.datacatalog.v1beta1.DataCatalog.SearchCatalog] - // call, and that the system should return the next page of data. If empty - // then the first page is returned. - string page_token = 3; + // Optional. Pagination token returned in an earlier + // [SearchCatalogResponse.next_page_token][google.cloud.datacatalog.v1beta1.SearchCatalogResponse.next_page_token], + // which indicates that this is a continuation of a prior + // [SearchCatalogRequest][google.cloud.datacatalog.v1beta1.DataCatalog.SearchCatalog] + // call, and that the system should return the next page of data. If empty, + // the first page is returned. + string page_token = 3 [(google.api.field_behavior) = OPTIONAL]; // Specifies the ordering of results, currently supported case-sensitive // choices are: - //
    - //
  • relevance
  • - //
  • last_access_timestamp [asc|desc], defaults to descending if not - // specified,
  • - //
  • last_modified_timestamp [asc|desc], defaults to descending if not - // specified.
  • - //
+ // + // * `relevance`, only supports desecending + // * `last_access_timestamp [asc|desc]`, defaults to descending if not + // specified + // * `last_modified_timestamp [asc|desc]`, defaults to descending if not + // specified + // + // If not specified, defaults to `relevance` descending. string order_by = 5; } // Response message for // [SearchCatalog][google.cloud.datacatalog.v1beta1.DataCatalog.SearchCatalog]. message SearchCatalogResponse { - // Search results in descending order of relevance. + // Search results. repeated SearchCatalogResult results = 1; // The token that can be used to retrieve the next page of results. @@ -315,32 +479,132 @@ message SearchCatalogResponse { } // Request message for -// [UpdateEntry][google.cloud.datacatalog.v1beta1.DataCatalog.UpdateEntry]. -message UpdateEntryRequest { - // Required. The updated Entry. - Entry entry = 1; - - // Optional. The fields to update on the entry. If absent or empty, all - // modifiable fields are updated. +// [CreateEntryGroup][google.cloud.datacatalog.v1beta1.DataCatalog.CreateEntryGroup]. +message CreateEntryGroupRequest { + // Required. The name of the project this entry group is in. Example: // - // Modifiable fields in synced entries: + // * projects/{project_id}/locations/{location} // - // 1. schema (Pub/Sub topics only) + // Note that this EntryGroup and its child resources may not actually be + // stored in the location in this name. + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "datacatalog.googleapis.com/EntryGroup" + } + ]; + + // Required. The id of the entry group to create. + string entry_group_id = 3 [(google.api.field_behavior) = REQUIRED]; + + // The entry group to create. Defaults to an empty entry group. + EntryGroup entry_group = 2; +} + +// Request message for +// [GetEntryGroup][google.cloud.datacatalog.v1beta1.DataCatalog.GetEntryGroup]. +message GetEntryGroupRequest { + // Required. 
The name of the entry group. For example, + // `projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}`. + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "datacatalog.googleapis.com/EntryGroup" + } + ]; + + // The fields to return. If not set or empty, all fields are returned. + google.protobuf.FieldMask read_mask = 2; +} + +// Request message for +// [DeleteEntryGroup][google.cloud.datacatalog.v1beta1.DataCatalog.DeleteEntryGroup]. +message DeleteEntryGroupRequest { + // Required. The name of the entry group. For example, + // `projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}`. + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "datacatalog.googleapis.com/EntryGroup" + } + ]; +} + +// Request message for +// [CreateEntry][google.cloud.datacatalog.v1beta1.DataCatalog.CreateEntry]. +message CreateEntryRequest { + // Required. The name of the entry group this entry is in. Example: // - // Modifiable fields in native entries: + // * projects/{project_id}/locations/{location}/entryGroups/{entry_group_id} // - // 1. display_name - // 2. description - // 3. schema + // Note that this Entry and its child resources may not actually be stored in + // the location in this name. + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "datacatalog.googleapis.com/EntryGroup" + } + ]; + + // Required. The id of the entry to create. + string entry_id = 3 [(google.api.field_behavior) = REQUIRED]; + + // Required. The entry to create. + Entry entry = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// Request message for +// [UpdateEntry][google.cloud.datacatalog.v1beta1.DataCatalog.UpdateEntry]. +message UpdateEntryRequest { + // Required. The updated entry. The "name" field must be set. 
+ Entry entry = 1 [(google.api.field_behavior) = REQUIRED]; + + // The fields to update on the entry. If absent or empty, all modifiable + // fields are updated. + // + // The following fields are modifiable: + // * For entries with type `DATA_STREAM`: + // * `schema` + // * For entries with type `FILESET` + // * `schema` + // * `display_name` + // * `description` + // * `gcs_fileset_spec` + // * `gcs_fileset_spec.file_patterns` google.protobuf.FieldMask update_mask = 2; } +// Request message for +// [DeleteEntry][google.cloud.datacatalog.v1beta1.DataCatalog.DeleteEntry]. +message DeleteEntryRequest { + // Required. The name of the entry. Example: + // + // * projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id} + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "datacatalog.googleapis.com/Entry" + } + ]; +} + // Request message for // [GetEntry][google.cloud.datacatalog.v1beta1.DataCatalog.GetEntry]. message GetEntryRequest { - // Required. The name of the entry. For example, - // "projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id}". - string name = 1; + // Required. The name of the entry. Example: + // + // * projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id} + // + // Entry groups are logical groupings of entries. Currently, users cannot + // create/modify entry groups. They are created by Data Catalog; they include + // `@bigquery` for all BigQuery entries, and `@pubsub` for all Cloud Pub/Sub + // entries. + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "datacatalog.googleapis.com/Entry" + } + ]; } // Request message for @@ -351,215 +615,327 @@ message LookupEntryRequest { oneof target_name { // The full name of the Google Cloud Platform resource the Data Catalog // entry represents. 
See: - // https://cloud.google.com/apis/design/resource_names#full_resource_name + // https://cloud.google.com/apis/design/resource_names#full_resource_name. // Full names are case-sensitive. // // Examples: - // "//bigquery.googleapis.com/projects/projectId/datasets/datasetId/tables/tableId". - // "//pubsub.googleapis.com/projects/projectId/topics/topicId" + // + // * //bigquery.googleapis.com/projects/projectId/datasets/datasetId/tables/tableId + // * //pubsub.googleapis.com/projects/projectId/topics/topicId string linked_resource = 1; // The SQL name of the entry. SQL names are case-sensitive. // // Examples: - //
    - //
  • cloud_pubsub.project_id.topic_id
  • - //
  • pubsub.project_id.`topic.id.with.dots`
  • - //
  • bigquery.project_id.dataset_id.table_id
  • - //
  • datacatalog.project_id.location_id.entry_group_id.entry_id
  • - //
- // *_ids shoud satisfy the standard SQL rules for identifiers. - // https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical + // + // * `cloud_pubsub.project_id.topic_id` + // * ``pubsub.project_id.`topic.id.with.dots` `` + // * `bigquery.project_id.dataset_id.table_id` + // * `datacatalog.project_id.location_id.entry_group_id.entry_id` + // + // `*_id`s shoud satisfy the standard SQL rules for identifiers. + // https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical. string sql_resource = 3; } } // Entry Metadata. // A Data Catalog Entry resource represents another resource in Google -// Cloud Platform, such as a BigQuery Dataset or a Pub/Sub Topic. Clients can -// use the `linked_resource` field in the Entry resource to refer to the -// original resource id of the source system. +// Cloud Platform, such as a BigQuery dataset or a Cloud Pub/Sub topic. +// Clients can use the `linked_resource` field in the Entry resource to refer to +// the original resource ID of the source system. // // An Entry resource contains resource details, such as its schema. An Entry can // also be used to attach flexible metadata, such as a // [Tag][google.cloud.datacatalog.v1beta1.Tag]. message Entry { - // Required when used in - // [UpdateEntryRequest][google.cloud.datacatalog.v1beta1.UpdateEntryRequest]. - // The Data Catalog resource name of the entry in URL format. For example, - // "projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id}". + option (google.api.resource) = { + type: "datacatalog.googleapis.com/Entry" + pattern: "projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}" + }; + + // The Data Catalog resource name of the entry in URL format. Example: + // + // * projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id} + // // Note that this Entry and its child resources may not actually be stored in // the location in this name. 
- string name = 1; + string name = 1 [(google.api.resource_reference) = { + type: "datacatalog.googleapis.com/EntryGroup" + }]; - // Output only. The full name of the cloud resource the entry belongs to. See: - // https://cloud.google.com/apis/design/resource_names#full_resource_name + // Output only. The resource this metadata entry refers to. // - // Data Catalog supports resources from select Google Cloud Platform systems. - // `linked_resource` is the full name of the Google Cloud Platform resource. + // For Google Cloud Platform resources, `linked_resource` is the [full name of + // the + // resource](https://cloud.google.com/apis/design/resource_names#full_resource_name). // For example, the `linked_resource` for a table resource from BigQuery is: // - // "//bigquery.googleapis.com/projects/projectId/datasets/datasetId/tables/tableId". - string linked_resource = 9; + // * //bigquery.googleapis.com/projects/projectId/datasets/datasetId/tables/tableId + string linked_resource = 9 [(google.api.field_behavior) = OUTPUT_ONLY]; - // Required. Type of entry. - EntryType type = 2; + // Required. Entry type. + oneof entry_type { + // The type of the entry. + EntryType type = 2; + } - // Optional. Type specification information. + // Type specification information. oneof type_spec { + // Specification that applies to a Cloud Storage fileset. This is only valid + // on entries of type FILESET. + GcsFilesetSpec gcs_fileset_spec = 6; + // Specification that applies to a BigQuery table. This is only valid on - // entries of type TABLE. + // entries of type `TABLE`. BigQueryTableSpec bigquery_table_spec = 12; // Specification for a group of BigQuery tables with name pattern - // [prefix]YYYYMMDD. Context: - // https://cloud.google.com/bigquery/docs/partitioned-tables#partitioning_versus_sharding + // `[prefix]YYYYMMDD`. Context: + // https://cloud.google.com/bigquery/docs/partitioned-tables#partitioning_versus_sharding. 
BigQueryDateShardedSpec bigquery_date_sharded_spec = 15; } - // Optional. Display information such as title and description. A short name - // to identify the entry, for example, "Analytics Data - Jan 2011". Default - // value is an empty string. + // Display information such as title and description. A short name to identify + // the entry, for example, "Analytics Data - Jan 2011". Default value is an + // empty string. string display_name = 3; - // Optional. Entry description, which can consist of several sentences or - // paragraphs that describe entry contents. Default value is an empty string. + // Entry description, which can consist of several sentences or paragraphs + // that describe entry contents. Default value is an empty string. string description = 4; - // Optional. Schema of the entry. An entry might not have any schema attached - // to it. + // Schema of the entry. An entry might not have any schema attached to it. Schema schema = 5; - // Output only. Timestamps about the underlying Google Cloud Platform resource - // -- not about this Data Catalog Entry. - SystemTimestamps source_system_timestamps = 7; + // Output only. Timestamps about the underlying Google Cloud Platform + // resource, not about this Data Catalog Entry. + SystemTimestamps source_system_timestamps = 7 + [(google.api.field_behavior) = OUTPUT_ONLY]; +} + +// EntryGroup Metadata. +// An EntryGroup resource represents a logical grouping of zero or more +// Data Catalog [Entry][google.cloud.datacatalog.v1beta1.Entry] resources. +message EntryGroup { + option (google.api.resource) = { + type: "datacatalog.googleapis.com/EntryGroup" + pattern: "projects/{project}/locations/{location}/entryGroups/{entry_group}" + }; + + // The resource name of the entry group in URL format. Example: + // + // * projects/{project_id}/locations/{location}/entryGroups/{entry_group_id} + // + // Note that this EntryGroup and its child resources may not actually be + // stored in the location in this name. 
+ string name = 1; + + // A short name to identify the entry group, for example, + // "analytics data - jan 2011". Default value is an empty string. + string display_name = 2; + + // Entry group description, which can consist of several sentences or + // paragraphs that describe entry group contents. Default value is an empty + // string. + string description = 3; + + // Output only. Timestamps about this EntryGroup. Default value is empty + // timestamps. + SystemTimestamps data_catalog_timestamps = 4 + [(google.api.field_behavior) = OUTPUT_ONLY]; } // Request message for // [CreateTagTemplate][google.cloud.datacatalog.v1beta1.DataCatalog.CreateTagTemplate]. message CreateTagTemplateRequest { // Required. The name of the project and the location this template is in. - // Example: "projects/{project_id}/locations/{location}". Note that this + // Example: + // + // * projects/{project_id}/locations/{location} + // // TagTemplate and its child resources may not actually be stored in the // location in this name. - string parent = 1; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "datacatalog.googleapis.com/TagTemplate" + } + ]; // Required. The id of the tag template to create. - string tag_template_id = 3; + string tag_template_id = 3 [(google.api.field_behavior) = REQUIRED]; // Required. The tag template to create. - TagTemplate tag_template = 2; + TagTemplate tag_template = 2 [(google.api.field_behavior) = REQUIRED]; } // Request message for // [GetTagTemplate][google.cloud.datacatalog.v1beta1.DataCatalog.GetTagTemplate]. message GetTagTemplateRequest { - // Required. The name of the tag template. For example, - // "projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}". - string name = 1; + // Required. The name of the tag template. 
Example: + // + // * projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id} + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "datacatalog.googleapis.com/TagTemplate" + } + ]; +} + +// Entry resources in Data Catalog can be of different types e.g. a BigQuery +// Table entry is of type `TABLE`. This enum describes all the possible types +// Data Catalog contains. +enum EntryType { + // Default unknown type + ENTRY_TYPE_UNSPECIFIED = 0; + + // Output only. The type of entry that has a GoogleSQL schema, including + // logical views. + TABLE = 2; + + // Output only. An entry type which is used for streaming entries. Example: + // Cloud Pub/Sub topic. + DATA_STREAM = 3; + + // Alpha feature. An entry type which is a set of files or objects. Example: + // Cloud Storage fileset. + FILESET = 4; } // Request message for // [UpdateTagTemplate][google.cloud.datacatalog.v1beta1.DataCatalog.UpdateTagTemplate]. message UpdateTagTemplateRequest { - // Required. The template to update. - TagTemplate tag_template = 1; + // Required. The template to update. The "name" field must be set. + TagTemplate tag_template = 1 [(google.api.field_behavior) = REQUIRED]; - // Optional. The field mask specifies the parts of the template to overwrite. + // The field mask specifies the parts of the template to overwrite. // // Allowed fields: // - // * display_name + // * `display_name` // - // If update_mask is omitted, all of the allowed fields above will be updated. + // If absent or empty, all of the allowed fields above will be updated. google.protobuf.FieldMask update_mask = 2; } // Request message for // [DeleteTagTemplate][google.cloud.datacatalog.v1beta1.DataCatalog.DeleteTagTemplate]. message DeleteTagTemplateRequest { - // Required. The name of the tag template to delete. For example, - // "projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}". - string name = 1; - - // Required. 
Currently, this field must always be set to true. + // Required. The name of the tag template to delete. Example: + // + // * projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id} + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "datacatalog.googleapis.com/TagTemplate" + } + ]; + + // Required. Currently, this field must always be set to `true`. // This confirms the deletion of any possible tags using this template. - // force = false will be supported in the future. - bool force = 2; + // `force = false` will be supported in the future. + bool force = 2 [(google.api.field_behavior) = REQUIRED]; } // Request message for // [CreateTag][google.cloud.datacatalog.v1beta1.DataCatalog.CreateTag]. message CreateTagRequest { - // Required. - // The name of the resource to attach this tag to. Tags can be attached to - // Entries. (example: - // "projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id}"). + // Required. The name of the resource to attach this tag to. Tags can be + // attached to Entries. Example: + // + // * projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id} + // // Note that this Tag and its child resources may not actually be stored in // the location in this name. - string parent = 1; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { type: "datacatalog.googleapis.com/Tag" } + ]; // Required. The tag to create. - Tag tag = 2; + Tag tag = 2 [(google.api.field_behavior) = REQUIRED]; } // Request message for // [UpdateTag][google.cloud.datacatalog.v1beta1.DataCatalog.UpdateTag]. message UpdateTagRequest { - // Required. The updated tag. - Tag tag = 1; + // Required. The updated tag. The "name" field must be set. + Tag tag = 1 [(google.api.field_behavior) = REQUIRED]; - // Optional. The fields to update on the Tag. 
If absent or empty, all - // modifiable fields are updated. Currently the only modifiable field is the - // field `fields`. + // The fields to update on the Tag. If absent or empty, all modifiable fields + // are updated. Currently the only modifiable field is the field `fields`. google.protobuf.FieldMask update_mask = 2; } // Request message for // [DeleteTag][google.cloud.datacatalog.v1beta1.DataCatalog.DeleteTag]. message DeleteTagRequest { - // Required. The name of the tag to delete. For example, - // "projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id}/tags/{tag_id}". - string name = 1; + // Required. The name of the tag to delete. Example: + // + // * projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id}/tags/{tag_id} + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "datacatalog.googleapis.com/Tag" + } + ]; } // Request message for // [CreateTagTemplateField][google.cloud.datacatalog.v1beta1.DataCatalog.CreateTagTemplateField]. message CreateTagTemplateFieldRequest { // Required. The name of the project this template is in. Example: - // "projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}". + // + // * projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id} + // // Note that this TagTemplateField may not actually be stored in the location // in this name. - string parent = 1; - - // Required. The id of the tag template field to create. + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "datacatalog.googleapis.com/TagTemplate" + } + ]; + + // Required. The ID of the tag template field to create. // Field ids can contain letters (both uppercase and lowercase), numbers - // (0-9), underscores (_) and dashes (-). Field ids must be at least 1 - // character long and at most 128 characters long. 
Field ids must also be - // unique to their template. - string tag_template_field_id = 2; + // (0-9), underscores (_) and dashes (-). Field IDs must be at least 1 + // character long and at most 128 characters long. Field IDs must also be + // unique within their template. + string tag_template_field_id = 2 [(google.api.field_behavior) = REQUIRED]; // Required. The tag template field to create. - TagTemplateField tag_template_field = 3; + TagTemplateField tag_template_field = 3 + [(google.api.field_behavior) = REQUIRED]; } // Request message for // [UpdateTagTemplateField][google.cloud.datacatalog.v1beta1.DataCatalog.UpdateTagTemplateField]. message UpdateTagTemplateFieldRequest { - // Required. The name of the tag template field. For example, - // "projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}/fields/{tag_template_field_id}". - string name = 1; + // Required. The name of the tag template field. Example: + // + // * projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}/fields/{tag_template_field_id} + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "datacatalog.googleapis.com/TagTemplateField" + } + ]; // Required. The template to update. - TagTemplateField tag_template_field = 2; + TagTemplateField tag_template_field = 2 + [(google.api.field_behavior) = REQUIRED]; - // Optional. The field mask specifies the parts of the template to overwrite. + // The field mask specifies the parts of the template to be updated. // Allowed fields: // - // * display_name - // * type.enum_type + // * `display_name` + // * `type.enum_type` // - // If update_mask is omitted, all of the allowed fields above will be updated. + // If `update_mask` is not set or empty, all of the allowed fields above will + // be updated. // // When updating an enum type, the provided values will be merged with the // existing values. 
Therefore, enum values can only be added, existing enum @@ -570,40 +946,38 @@ message UpdateTagTemplateFieldRequest { // Request message for // [RenameTagTemplateField][google.cloud.datacatalog.v1beta1.DataCatalog.RenameTagTemplateField]. message RenameTagTemplateFieldRequest { - // Required. The name of the tag template. For example, - // "projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}/fields/{tag_template_field_id}". - string name = 1; + // Required. The name of the tag template. Example: + // + // * projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}/fields/{tag_template_field_id} + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "datacatalog.googleapis.com/TagTemplateField" + } + ]; // Required. The new ID of this tag template field. For example, - // "my_new_field". - string new_tag_template_field_id = 2; -} - -// Entry resources in Data Catalog can be of different types e.g. BigQuery -// Table entry is of type 'TABLE'. This enum describes all the possible types -// Data Catalog contains. -enum EntryType { - // Default unknown type - ENTRY_TYPE_UNSPECIFIED = 0; - - // The type of entry that has a GoogleSQL schema, including logical views. - TABLE = 2; - - // An entry type which is used for streaming entries. Example - Pub/Sub. - DATA_STREAM = 3; + // `my_new_field`. + string new_tag_template_field_id = 2 [(google.api.field_behavior) = REQUIRED]; } // Request message for // [DeleteTagTemplateField][google.cloud.datacatalog.v1beta1.DataCatalog.DeleteTagTemplateField]. message DeleteTagTemplateFieldRequest { - // Required. The name of the tag template field to delete. For example, - // "projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}/fields/{tag_template_field_id}". - string name = 1; - - // Required. Currently, this field must always be set to true. + // Required. The name of the tag template field to delete. 
Example: + // + // * projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}/fields/{tag_template_field_id} + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "datacatalog.googleapis.com/TagTemplateField" + } + ]; + + // Required. Currently, this field must always be set to `true`. // This confirms the deletion of this field from any tags using this field. - // force = false will be supported in the future. - bool force = 2; + // `force = false` will be supported in the future. + bool force = 2 [(google.api.field_behavior) = REQUIRED]; } // Request message for @@ -611,14 +985,18 @@ message DeleteTagTemplateFieldRequest { message ListTagsRequest { // Required. The name of the Data Catalog resource to list the tags of. The // resource could be an [Entry][google.cloud.datacatalog.v1beta1.Entry]. - string parent = 1; - - // Optional. The maximum number of tags to return. Default is 10. Max limit is - // 1000. + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "datacatalog.googleapis.com/Tag" + } + ]; + + // The maximum number of tags to return. Default is 10. Max limit is 1000. int32 page_size = 2; - // Optional. Token that specifies which page is requested. If empty, the first - // page is returned. + // Token that specifies which page is requested. If empty, the first page is + // returned. 
string page_token = 3; } diff --git a/datacatalog/google/cloud/datacatalog_v1beta1/proto/datacatalog_pb2.py b/datacatalog/google/cloud/datacatalog_v1beta1/proto/datacatalog_pb2.py index 10641f51e1ab..01f2e79352d9 100644 --- a/datacatalog/google/cloud/datacatalog_v1beta1/proto/datacatalog_pb2.py +++ b/datacatalog/google/cloud/datacatalog_v1beta1/proto/datacatalog_pb2.py @@ -17,6 +17,12 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 +from google.cloud.datacatalog_v1beta1.proto import ( + gcs_fileset_spec_pb2 as google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_gcs__fileset__spec__pb2, +) from google.cloud.datacatalog_v1beta1.proto import ( schema_pb2 as google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_schema__pb2, ) @@ -36,7 +42,6 @@ from google.iam.v1 import policy_pb2 as google_dot_iam_dot_v1_dot_policy__pb2 from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 -from google.api import client_pb2 as google_dot_api_dot_client__pb2 DESCRIPTOR = _descriptor.FileDescriptor( @@ -47,10 +52,14 @@ "\n\034com.google.cloud.datacatalogP\001ZKgoogle.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1;datacatalog\370\001\001" ), serialized_pb=_b( - '\n8google/cloud/datacatalog_v1beta1/proto/datacatalog.proto\x12 
google.cloud.datacatalog.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x33google/cloud/datacatalog_v1beta1/proto/schema.proto\x1a\x33google/cloud/datacatalog_v1beta1/proto/search.proto\x1a\x37google/cloud/datacatalog_v1beta1/proto/table_spec.proto\x1a\x31google/cloud/datacatalog_v1beta1/proto/tags.proto\x1a\x37google/cloud/datacatalog_v1beta1/proto/timestamps.proto\x1a\x1egoogle/iam/v1/iam_policy.proto\x1a\x1agoogle/iam/v1/policy.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x17google/api/client.proto"\x8f\x02\n\x14SearchCatalogRequest\x12K\n\x05scope\x18\x06 \x01(\x0b\x32<.google.cloud.datacatalog.v1beta1.SearchCatalogRequest.Scope\x12\r\n\x05query\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\x12\x10\n\x08order_by\x18\x05 \x01(\t\x1a\x62\n\x05Scope\x12\x17\n\x0finclude_org_ids\x18\x02 \x03(\t\x12\x1b\n\x13include_project_ids\x18\x03 \x03(\t\x12#\n\x1binclude_gcp_public_datasets\x18\x07 \x01(\x08"x\n\x15SearchCatalogResponse\x12\x46\n\x07results\x18\x01 \x03(\x0b\x32\x35.google.cloud.datacatalog.v1beta1.SearchCatalogResult\x12\x17\n\x0fnext_page_token\x18\x03 \x01(\t"}\n\x12UpdateEntryRequest\x12\x36\n\x05\x65ntry\x18\x01 \x01(\x0b\x32\'.google.cloud.datacatalog.v1beta1.Entry\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"\x1f\n\x0fGetEntryRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"V\n\x12LookupEntryRequest\x12\x19\n\x0flinked_resource\x18\x01 \x01(\tH\x00\x12\x16\n\x0csql_resource\x18\x03 \x01(\tH\x00\x42\r\n\x0btarget_name"\xe6\x03\n\x05\x45ntry\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x17\n\x0flinked_resource\x18\t \x01(\t\x12\x39\n\x04type\x18\x02 \x01(\x0e\x32+.google.cloud.datacatalog.v1beta1.EntryType\x12R\n\x13\x62igquery_table_spec\x18\x0c \x01(\x0b\x32\x33.google.cloud.datacatalog.v1beta1.BigQueryTableSpecH\x00\x12_\n\x1a\x62igquery_date_sharded_spec\x18\x0f 
\x01(\x0b\x32\x39.google.cloud.datacatalog.v1beta1.BigQueryDateShardedSpecH\x00\x12\x14\n\x0c\x64isplay_name\x18\x03 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x04 \x01(\t\x12\x38\n\x06schema\x18\x05 \x01(\x0b\x32(.google.cloud.datacatalog.v1beta1.Schema\x12T\n\x18source_system_timestamps\x18\x07 \x01(\x0b\x32\x32.google.cloud.datacatalog.v1beta1.SystemTimestampsB\x0b\n\ttype_spec"\x88\x01\n\x18\x43reateTagTemplateRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x17\n\x0ftag_template_id\x18\x03 \x01(\t\x12\x43\n\x0ctag_template\x18\x02 \x01(\x0b\x32-.google.cloud.datacatalog.v1beta1.TagTemplate"%\n\x15GetTagTemplateRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"\x90\x01\n\x18UpdateTagTemplateRequest\x12\x43\n\x0ctag_template\x18\x01 \x01(\x0b\x32-.google.cloud.datacatalog.v1beta1.TagTemplate\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"7\n\x18\x44\x65leteTagTemplateRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05\x66orce\x18\x02 \x01(\x08"V\n\x10\x43reateTagRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x32\n\x03tag\x18\x02 \x01(\x0b\x32%.google.cloud.datacatalog.v1beta1.Tag"w\n\x10UpdateTagRequest\x12\x32\n\x03tag\x18\x01 \x01(\x0b\x32%.google.cloud.datacatalog.v1beta1.Tag\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask" \n\x10\x44\x65leteTagRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"\x9e\x01\n\x1d\x43reateTagTemplateFieldRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x1d\n\x15tag_template_field_id\x18\x02 \x01(\t\x12N\n\x12tag_template_field\x18\x03 \x01(\x0b\x32\x32.google.cloud.datacatalog.v1beta1.TagTemplateField"\xae\x01\n\x1dUpdateTagTemplateFieldRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12N\n\x12tag_template_field\x18\x02 \x01(\x0b\x32\x32.google.cloud.datacatalog.v1beta1.TagTemplateField\x12/\n\x0bupdate_mask\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"P\n\x1dRenameTagTemplateFieldRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12!\n\x19new_tag_template_field_id\x18\x02 
\x01(\t"<\n\x1d\x44\x65leteTagTemplateFieldRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05\x66orce\x18\x02 \x01(\x08"H\n\x0fListTagsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"`\n\x10ListTagsResponse\x12\x33\n\x04tags\x18\x01 \x03(\x0b\x32%.google.cloud.datacatalog.v1beta1.Tag\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t*C\n\tEntryType\x12\x1a\n\x16\x45NTRY_TYPE_UNSPECIFIED\x10\x00\x12\t\n\x05TABLE\x10\x02\x12\x0f\n\x0b\x44\x41TA_STREAM\x10\x03\x32\xf8\x1c\n\x0b\x44\x61taCatalog\x12\xa4\x01\n\rSearchCatalog\x12\x36.google.cloud.datacatalog.v1beta1.SearchCatalogRequest\x1a\x37.google.cloud.datacatalog.v1beta1.SearchCatalogResponse""\x82\xd3\xe4\x93\x02\x1c"\x17/v1beta1/catalog:search:\x01*\x12\xc1\x01\n\x0bUpdateEntry\x12\x34.google.cloud.datacatalog.v1beta1.UpdateEntryRequest\x1a\'.google.cloud.datacatalog.v1beta1.Entry"S\x82\xd3\xe4\x93\x02M2D/v1beta1/{entry.name=projects/*/locations/*/entryGroups/*/entries/*}:\x05\x65ntry\x12\xae\x01\n\x08GetEntry\x12\x31.google.cloud.datacatalog.v1beta1.GetEntryRequest\x1a\'.google.cloud.datacatalog.v1beta1.Entry"F\x82\xd3\xe4\x93\x02@\x12>/v1beta1/{name=projects/*/locations/*/entryGroups/*/entries/*}\x12\x8d\x01\n\x0bLookupEntry\x12\x34.google.cloud.datacatalog.v1beta1.LookupEntryRequest\x1a\'.google.cloud.datacatalog.v1beta1.Entry"\x1f\x82\xd3\xe4\x93\x02\x19\x12\x17/v1beta1/entries:lookup\x12\xcb\x01\n\x11\x43reateTagTemplate\x12:.google.cloud.datacatalog.v1beta1.CreateTagTemplateRequest\x1a-.google.cloud.datacatalog.v1beta1.TagTemplate"K\x82\xd3\xe4\x93\x02\x45"5/v1beta1/{parent=projects/*/locations/*}/tagTemplates:\x0ctag_template\x12\xb7\x01\n\x0eGetTagTemplate\x12\x37.google.cloud.datacatalog.v1beta1.GetTagTemplateRequest\x1a-.google.cloud.datacatalog.v1beta1.TagTemplate"=\x82\xd3\xe4\x93\x02\x37\x12\x35/v1beta1/{name=projects/*/locations/*/tagTemplates/*}\x12\xd8\x01\n\x11UpdateTagTemplate\x12:.google.cloud.datacatalog.v1beta1.UpdateTagTemp
lateRequest\x1a-.google.cloud.datacatalog.v1beta1.TagTemplate"X\x82\xd3\xe4\x93\x02R2B/v1beta1/{tag_template.name=projects/*/locations/*/tagTemplates/*}:\x0ctag_template\x12\xa6\x01\n\x11\x44\x65leteTagTemplate\x12:.google.cloud.datacatalog.v1beta1.DeleteTagTemplateRequest\x1a\x16.google.protobuf.Empty"=\x82\xd3\xe4\x93\x02\x37*5/v1beta1/{name=projects/*/locations/*/tagTemplates/*}\x12\xe9\x01\n\x16\x43reateTagTemplateField\x12?.google.cloud.datacatalog.v1beta1.CreateTagTemplateFieldRequest\x1a\x32.google.cloud.datacatalog.v1beta1.TagTemplateField"Z\x82\xd3\xe4\x93\x02T">/v1beta1/{parent=projects/*/locations/*/tagTemplates/*}/fields:\x12tag_template_field\x12\xe9\x01\n\x16UpdateTagTemplateField\x12?.google.cloud.datacatalog.v1beta1.UpdateTagTemplateFieldRequest\x1a\x32.google.cloud.datacatalog.v1beta1.TagTemplateField"Z\x82\xd3\xe4\x93\x02T2>/v1beta1/{name=projects/*/locations/*/tagTemplates/*/fields/*}:\x12tag_template_field\x12\xdf\x01\n\x16RenameTagTemplateField\x12?.google.cloud.datacatalog.v1beta1.RenameTagTemplateFieldRequest\x1a\x32.google.cloud.datacatalog.v1beta1.TagTemplateField"P\x82\xd3\xe4\x93\x02J"E/v1beta1/{name=projects/*/locations/*/tagTemplates/*/fields/*}:rename:\x01*\x12\xb9\x01\n\x16\x44\x65leteTagTemplateField\x12?.google.cloud.datacatalog.v1beta1.DeleteTagTemplateFieldRequest\x1a\x16.google.protobuf.Empty"F\x82\xd3\xe4\x93\x02@*>/v1beta1/{name=projects/*/locations/*/tagTemplates/*/fields/*}\x12\xba\x01\n\tCreateTag\x12\x32.google.cloud.datacatalog.v1beta1.CreateTagRequest\x1a%.google.cloud.datacatalog.v1beta1.Tag"R\x82\xd3\xe4\x93\x02L"E/v1beta1/{parent=projects/*/locations/*/entryGroups/*/entries/*}/tags:\x03tag\x12\xbe\x01\n\tUpdateTag\x12\x32.google.cloud.datacatalog.v1beta1.UpdateTagRequest\x1a%.google.cloud.datacatalog.v1beta1.Tag"V\x82\xd3\xe4\x93\x02P2I/v1beta1/{tag.name=projects/*/locations/*/entryGroups/*/entries/*/tags/*}:\x03tag\x12\xa6\x01\n\tDeleteTag\x12\x32.google.cloud.datacatalog.v1beta1.DeleteTagRequest\x1a\x16.google.protobu
f.Empty"M\x82\xd3\xe4\x93\x02G*E/v1beta1/{name=projects/*/locations/*/entryGroups/*/entries/*/tags/*}\x12\xc0\x01\n\x08ListTags\x12\x31.google.cloud.datacatalog.v1beta1.ListTagsRequest\x1a\x32.google.cloud.datacatalog.v1beta1.ListTagsResponse"M\x82\xd3\xe4\x93\x02G\x12\x45/v1beta1/{parent=projects/*/locations/*/entryGroups/*/entries/*}/tags\x12\x9c\x01\n\x0cSetIamPolicy\x12".google.iam.v1.SetIamPolicyRequest\x1a\x15.google.iam.v1.Policy"Q\x82\xd3\xe4\x93\x02K"F/v1beta1/{resource=projects/*/locations/*/tagTemplates/*}:setIamPolicy:\x01*\x12\x9c\x01\n\x0cGetIamPolicy\x12".google.iam.v1.GetIamPolicyRequest\x1a\x15.google.iam.v1.Policy"Q\x82\xd3\xe4\x93\x02K"F/v1beta1/{resource=projects/*/locations/*/tagTemplates/*}:getIamPolicy:\x01*\x12\xc2\x01\n\x12TestIamPermissions\x12(.google.iam.v1.TestIamPermissionsRequest\x1a).google.iam.v1.TestIamPermissionsResponse"W\x82\xd3\xe4\x93\x02Q"L/v1beta1/{resource=projects/*/locations/*/tagTemplates/*}:testIamPermissions:\x01*\x1aN\xca\x41\x1a\x64\x61tacatalog.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformBp\n\x1c\x63om.google.cloud.datacatalogP\x01ZKgoogle.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1;datacatalog\xf8\x01\x01\x62\x06proto3' + '\n8google/cloud/datacatalog_v1beta1/proto/datacatalog.proto\x12 google.cloud.datacatalog.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a=google/cloud/datacatalog_v1beta1/proto/gcs_fileset_spec.proto\x1a\x33google/cloud/datacatalog_v1beta1/proto/schema.proto\x1a\x33google/cloud/datacatalog_v1beta1/proto/search.proto\x1a\x37google/cloud/datacatalog_v1beta1/proto/table_spec.proto\x1a\x31google/cloud/datacatalog_v1beta1/proto/tags.proto\x1a\x37google/cloud/datacatalog_v1beta1/proto/timestamps.proto\x1a\x1egoogle/iam/v1/iam_policy.proto\x1a\x1agoogle/iam/v1/policy.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a 
google/protobuf/field_mask.proto"\x9e\x02\n\x14SearchCatalogRequest\x12P\n\x05scope\x18\x06 \x01(\x0b\x32<.google.cloud.datacatalog.v1beta1.SearchCatalogRequest.ScopeB\x03\xe0\x41\x02\x12\x12\n\x05query\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x17\n\npage_token\x18\x03 \x01(\tB\x03\xe0\x41\x01\x12\x10\n\x08order_by\x18\x05 \x01(\t\x1a\x62\n\x05Scope\x12\x17\n\x0finclude_org_ids\x18\x02 \x03(\t\x12\x1b\n\x13include_project_ids\x18\x03 \x03(\t\x12#\n\x1binclude_gcp_public_datasets\x18\x07 \x01(\x08"x\n\x15SearchCatalogResponse\x12\x46\n\x07results\x18\x01 \x03(\x0b\x32\x35.google.cloud.datacatalog.v1beta1.SearchCatalogResult\x12\x17\n\x0fnext_page_token\x18\x03 \x01(\t"\xb8\x01\n\x17\x43reateEntryGroupRequest\x12=\n\x06parent\x18\x01 \x01(\tB-\xe0\x41\x02\xfa\x41\'\x12%datacatalog.googleapis.com/EntryGroup\x12\x1b\n\x0e\x65ntry_group_id\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x41\n\x0b\x65ntry_group\x18\x02 \x01(\x0b\x32,.google.cloud.datacatalog.v1beta1.EntryGroup"\x82\x01\n\x14GetEntryGroupRequest\x12;\n\x04name\x18\x01 \x01(\tB-\xe0\x41\x02\xfa\x41\'\n%datacatalog.googleapis.com/EntryGroup\x12-\n\tread_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"V\n\x17\x44\x65leteEntryGroupRequest\x12;\n\x04name\x18\x01 \x01(\tB-\xe0\x41\x02\xfa\x41\'\n%datacatalog.googleapis.com/EntryGroup"\xa7\x01\n\x12\x43reateEntryRequest\x12=\n\x06parent\x18\x01 \x01(\tB-\xe0\x41\x02\xfa\x41\'\n%datacatalog.googleapis.com/EntryGroup\x12\x15\n\x08\x65ntry_id\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12;\n\x05\x65ntry\x18\x02 \x01(\x0b\x32\'.google.cloud.datacatalog.v1beta1.EntryB\x03\xe0\x41\x02"\x82\x01\n\x12UpdateEntryRequest\x12;\n\x05\x65ntry\x18\x01 \x01(\x0b\x32\'.google.cloud.datacatalog.v1beta1.EntryB\x03\xe0\x41\x02\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"L\n\x12\x44\x65leteEntryRequest\x12\x36\n\x04name\x18\x01 \x01(\tB(\xe0\x41\x02\xfa\x41"\n 
datacatalog.googleapis.com/Entry"I\n\x0fGetEntryRequest\x12\x36\n\x04name\x18\x01 \x01(\tB(\xe0\x41\x02\xfa\x41"\n datacatalog.googleapis.com/Entry"V\n\x12LookupEntryRequest\x12\x19\n\x0flinked_resource\x18\x01 \x01(\tH\x00\x12\x16\n\x0csql_resource\x18\x03 \x01(\tH\x00\x42\r\n\x0btarget_name"\xf4\x05\n\x05\x45ntry\x12\x38\n\x04name\x18\x01 \x01(\tB*\xfa\x41\'\n%datacatalog.googleapis.com/EntryGroup\x12\x1c\n\x0flinked_resource\x18\t \x01(\tB\x03\xe0\x41\x03\x12;\n\x04type\x18\x02 \x01(\x0e\x32+.google.cloud.datacatalog.v1beta1.EntryTypeH\x00\x12L\n\x10gcs_fileset_spec\x18\x06 \x01(\x0b\x32\x30.google.cloud.datacatalog.v1beta1.GcsFilesetSpecH\x01\x12R\n\x13\x62igquery_table_spec\x18\x0c \x01(\x0b\x32\x33.google.cloud.datacatalog.v1beta1.BigQueryTableSpecH\x01\x12_\n\x1a\x62igquery_date_sharded_spec\x18\x0f \x01(\x0b\x32\x39.google.cloud.datacatalog.v1beta1.BigQueryDateShardedSpecH\x01\x12\x14\n\x0c\x64isplay_name\x18\x03 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x04 \x01(\t\x12\x38\n\x06schema\x18\x05 \x01(\x0b\x32(.google.cloud.datacatalog.v1beta1.Schema\x12Y\n\x18source_system_timestamps\x18\x07 \x01(\x0b\x32\x32.google.cloud.datacatalog.v1beta1.SystemTimestampsB\x03\xe0\x41\x03:x\xea\x41u\n datacatalog.googleapis.com/Entry\x12Qprojects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}B\x0c\n\nentry_typeB\x0b\n\ttype_spec"\x8e\x02\n\nEntryGroup\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12X\n\x17\x64\x61ta_catalog_timestamps\x18\x04 \x01(\x0b\x32\x32.google.cloud.datacatalog.v1beta1.SystemTimestampsB\x03\xe0\x41\x03:m\xea\x41j\n%datacatalog.googleapis.com/EntryGroup\x12\x41projects/{project}/locations/{location}/entryGroups/{entry_group}"\xc2\x01\n\x18\x43reateTagTemplateRequest\x12>\n\x06parent\x18\x01 \x01(\tB.\xe0\x41\x02\xfa\x41(\x12&datacatalog.googleapis.com/TagTemplate\x12\x1c\n\x0ftag_template_id\x18\x03 
\x01(\tB\x03\xe0\x41\x02\x12H\n\x0ctag_template\x18\x02 \x01(\x0b\x32-.google.cloud.datacatalog.v1beta1.TagTemplateB\x03\xe0\x41\x02"U\n\x15GetTagTemplateRequest\x12<\n\x04name\x18\x01 \x01(\tB.\xe0\x41\x02\xfa\x41(\n&datacatalog.googleapis.com/TagTemplate"\x95\x01\n\x18UpdateTagTemplateRequest\x12H\n\x0ctag_template\x18\x01 \x01(\x0b\x32-.google.cloud.datacatalog.v1beta1.TagTemplateB\x03\xe0\x41\x02\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"l\n\x18\x44\x65leteTagTemplateRequest\x12<\n\x04name\x18\x01 \x01(\tB.\xe0\x41\x02\xfa\x41(\n&datacatalog.googleapis.com/TagTemplate\x12\x12\n\x05\x66orce\x18\x02 \x01(\x08\x42\x03\xe0\x41\x02"\x83\x01\n\x10\x43reateTagRequest\x12\x36\n\x06parent\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x64\x61tacatalog.googleapis.com/Tag\x12\x37\n\x03tag\x18\x02 \x01(\x0b\x32%.google.cloud.datacatalog.v1beta1.TagB\x03\xe0\x41\x02"|\n\x10UpdateTagRequest\x12\x37\n\x03tag\x18\x01 \x01(\x0b\x32%.google.cloud.datacatalog.v1beta1.TagB\x03\xe0\x41\x02\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"H\n\x10\x44\x65leteTagRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \x12\x1e\x64\x61tacatalog.googleapis.com/Tag"\xd8\x01\n\x1d\x43reateTagTemplateFieldRequest\x12>\n\x06parent\x18\x01 \x01(\tB.\xe0\x41\x02\xfa\x41(\n&datacatalog.googleapis.com/TagTemplate\x12"\n\x15tag_template_field_id\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12S\n\x12tag_template_field\x18\x03 \x01(\x0b\x32\x32.google.cloud.datacatalog.v1beta1.TagTemplateFieldB\x03\xe0\x41\x02"\xe8\x01\n\x1dUpdateTagTemplateFieldRequest\x12\x41\n\x04name\x18\x01 \x01(\tB3\xe0\x41\x02\xfa\x41-\n+datacatalog.googleapis.com/TagTemplateField\x12S\n\x12tag_template_field\x18\x02 \x01(\x0b\x32\x32.google.cloud.datacatalog.v1beta1.TagTemplateFieldB\x03\xe0\x41\x02\x12/\n\x0bupdate_mask\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"\x8a\x01\n\x1dRenameTagTemplateFieldRequest\x12\x41\n\x04name\x18\x01 
\x01(\tB3\xe0\x41\x02\xfa\x41-\n+datacatalog.googleapis.com/TagTemplateField\x12&\n\x19new_tag_template_field_id\x18\x02 \x01(\tB\x03\xe0\x41\x02"v\n\x1d\x44\x65leteTagTemplateFieldRequest\x12\x41\n\x04name\x18\x01 \x01(\tB3\xe0\x41\x02\xfa\x41-\n+datacatalog.googleapis.com/TagTemplateField\x12\x12\n\x05\x66orce\x18\x02 \x01(\x08\x42\x03\xe0\x41\x02"p\n\x0fListTagsRequest\x12\x36\n\x06parent\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \x12\x1e\x64\x61tacatalog.googleapis.com/Tag\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"`\n\x10ListTagsResponse\x12\x33\n\x04tags\x18\x01 \x03(\x0b\x32%.google.cloud.datacatalog.v1beta1.Tag\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t*P\n\tEntryType\x12\x1a\n\x16\x45NTRY_TYPE_UNSPECIFIED\x10\x00\x12\t\n\x05TABLE\x10\x02\x12\x0f\n\x0b\x44\x41TA_STREAM\x10\x03\x12\x0b\n\x07\x46ILESET\x10\x04\x32\xc8+\n\x0b\x44\x61taCatalog\x12\xb2\x01\n\rSearchCatalog\x12\x36.google.cloud.datacatalog.v1beta1.SearchCatalogRequest\x1a\x37.google.cloud.datacatalog.v1beta1.SearchCatalogResponse"0\x82\xd3\xe4\x93\x02\x1c"\x17/v1beta1/catalog:search:\x01*\xda\x41\x0bscope,query\x12\xea\x01\n\x10\x43reateEntryGroup\x12\x39.google.cloud.datacatalog.v1beta1.CreateEntryGroupRequest\x1a,.google.cloud.datacatalog.v1beta1.EntryGroup"m\x82\xd3\xe4\x93\x02\x43"4/v1beta1/{parent=projects/*/locations/*}/entryGroups:\x0b\x65ntry_group\xda\x41!parent,entry_group_id,entry_group\x12\xcb\x01\n\rGetEntryGroup\x12\x36.google.cloud.datacatalog.v1beta1.GetEntryGroupRequest\x1a,.google.cloud.datacatalog.v1beta1.EntryGroup"T\x82\xd3\xe4\x93\x02\x36\x12\x34/v1beta1/{name=projects/*/locations/*/entryGroups/*}\xda\x41\x04name\xda\x41\x0ename,read_mask\x12\xaa\x01\n\x10\x44\x65leteEntryGroup\x12\x39.google.cloud.datacatalog.v1beta1.DeleteEntryGroupRequest\x1a\x16.google.protobuf.Empty"C\x82\xd3\xe4\x93\x02\x36*4/v1beta1/{name=projects/*/locations/*/entryGroups/*}\xda\x41\x04name\x12\xd3\x01\n\x0b\x43reateEntry\x12\x34.google.cloud.datacatalog.v1beta1.CreateE
ntryRequest\x1a\'.google.cloud.datacatalog.v1beta1.Entry"e\x82\xd3\xe4\x93\x02G">/v1beta1/{parent=projects/*/locations/*/entryGroups/*}/entries:\x05\x65ntry\xda\x41\x15parent,entry_id,entry\x12\xdd\x01\n\x0bUpdateEntry\x12\x34.google.cloud.datacatalog.v1beta1.UpdateEntryRequest\x1a\'.google.cloud.datacatalog.v1beta1.Entry"o\x82\xd3\xe4\x93\x02M2D/v1beta1/{entry.name=projects/*/locations/*/entryGroups/*/entries/*}:\x05\x65ntry\xda\x41\x05\x65ntry\xda\x41\x11\x65ntry,update_mask\x12\xaa\x01\n\x0b\x44\x65leteEntry\x12\x34.google.cloud.datacatalog.v1beta1.DeleteEntryRequest\x1a\x16.google.protobuf.Empty"M\x82\xd3\xe4\x93\x02@*>/v1beta1/{name=projects/*/locations/*/entryGroups/*/entries/*}\xda\x41\x04name\x12\xb5\x01\n\x08GetEntry\x12\x31.google.cloud.datacatalog.v1beta1.GetEntryRequest\x1a\'.google.cloud.datacatalog.v1beta1.Entry"M\x82\xd3\xe4\x93\x02@\x12>/v1beta1/{name=projects/*/locations/*/entryGroups/*/entries/*}\xda\x41\x04name\x12\x8d\x01\n\x0bLookupEntry\x12\x34.google.cloud.datacatalog.v1beta1.LookupEntryRequest\x1a\'.google.cloud.datacatalog.v1beta1.Entry"\x1f\x82\xd3\xe4\x93\x02\x19\x12\x17/v1beta1/entries:lookup\x12\xf1\x01\n\x11\x43reateTagTemplate\x12:.google.cloud.datacatalog.v1beta1.CreateTagTemplateRequest\x1a-.google.cloud.datacatalog.v1beta1.TagTemplate"q\x82\xd3\xe4\x93\x02\x45"5/v1beta1/{parent=projects/*/locations/*}/tagTemplates:\x0ctag_template\xda\x41#parent,tag_template_id,tag_template\x12\xbe\x01\n\x0eGetTagTemplate\x12\x37.google.cloud.datacatalog.v1beta1.GetTagTemplateRequest\x1a-.google.cloud.datacatalog.v1beta1.TagTemplate"D\x82\xd3\xe4\x93\x02\x37\x12\x35/v1beta1/{name=projects/*/locations/*/tagTemplates/*}\xda\x41\x04name\x12\x83\x02\n\x11UpdateTagTemplate\x12:.google.cloud.datacatalog.v1beta1.UpdateTagTemplateRequest\x1a-.google.cloud.datacatalog.v1beta1.TagTemplate"\x82\x01\x82\xd3\xe4\x93\x02R2B/v1beta1/{tag_template.name=projects/*/locations/*/tagTemplates/*}:\x0ctag_template\xda\x41\x0ctag_template\xda\x41\x18tag_template,update_mas
k\x12\xb3\x01\n\x11\x44\x65leteTagTemplate\x12:.google.cloud.datacatalog.v1beta1.DeleteTagTemplateRequest\x1a\x16.google.protobuf.Empty"J\x82\xd3\xe4\x93\x02\x37*5/v1beta1/{name=projects/*/locations/*/tagTemplates/*}\xda\x41\nname,force\x12\x9c\x02\n\x16\x43reateTagTemplateField\x12?.google.cloud.datacatalog.v1beta1.CreateTagTemplateFieldRequest\x1a\x32.google.cloud.datacatalog.v1beta1.TagTemplateField"\x8c\x01\x82\xd3\xe4\x93\x02T">/v1beta1/{parent=projects/*/locations/*/tagTemplates/*}/fields:\x12tag_template_field\xda\x41/parent,tag_template_field_id,tag_template_field\x12\xaa\x02\n\x16UpdateTagTemplateField\x12?.google.cloud.datacatalog.v1beta1.UpdateTagTemplateFieldRequest\x1a\x32.google.cloud.datacatalog.v1beta1.TagTemplateField"\x9a\x01\x82\xd3\xe4\x93\x02T2>/v1beta1/{name=projects/*/locations/*/tagTemplates/*/fields/*}:\x12tag_template_field\xda\x41\x17name,tag_template_field\xda\x41#name,tag_template_field,update_mask\x12\x80\x02\n\x16RenameTagTemplateField\x12?.google.cloud.datacatalog.v1beta1.RenameTagTemplateFieldRequest\x1a\x32.google.cloud.datacatalog.v1beta1.TagTemplateField"q\x82\xd3\xe4\x93\x02J"E/v1beta1/{name=projects/*/locations/*/tagTemplates/*/fields/*}:rename:\x01*\xda\x41\x1ename,new_tag_template_field_id\x12\xc6\x01\n\x16\x44\x65leteTagTemplateField\x12?.google.cloud.datacatalog.v1beta1.DeleteTagTemplateFieldRequest\x1a\x16.google.protobuf.Empty"S\x82\xd3\xe4\x93\x02@*>/v1beta1/{name=projects/*/locations/*/tagTemplates/*/fields/*}\xda\x41\nname,force\x12\xc7\x01\n\tCreateTag\x12\x32.google.cloud.datacatalog.v1beta1.CreateTagRequest\x1a%.google.cloud.datacatalog.v1beta1.Tag"_\x82\xd3\xe4\x93\x02L"E/v1beta1/{parent=projects/*/locations/*/entryGroups/*/entries/*}/tags:\x03tag\xda\x41\nparent,tag\x12\xd6\x01\n\tUpdateTag\x12\x32.google.cloud.datacatalog.v1beta1.UpdateTagRequest\x1a%.google.cloud.datacatalog.v1beta1.Tag"n\x82\xd3\xe4\x93\x02P2I/v1beta1/{tag.name=projects/*/locations/*/entryGroups/*/entries/*/tags/*}:\x03tag\xda\x41\x03tag\xda\x41
\x0ftag,update_mask\x12\xad\x01\n\tDeleteTag\x12\x32.google.cloud.datacatalog.v1beta1.DeleteTagRequest\x1a\x16.google.protobuf.Empty"T\x82\xd3\xe4\x93\x02G*E/v1beta1/{name=projects/*/locations/*/entryGroups/*/entries/*/tags/*}\xda\x41\x04name\x12\xc9\x01\n\x08ListTags\x12\x31.google.cloud.datacatalog.v1beta1.ListTagsRequest\x1a\x32.google.cloud.datacatalog.v1beta1.ListTagsResponse"V\x82\xd3\xe4\x93\x02G\x12\x45/v1beta1/{parent=projects/*/locations/*/entryGroups/*/entries/*}/tags\xda\x41\x06parent\x12\xc0\x02\n\x0cSetIamPolicy\x12".google.iam.v1.SetIamPolicyRequest\x1a\x15.google.iam.v1.Policy"\xf4\x01\x82\xd3\xe4\x93\x02\xed\x01"F/v1beta1/{resource=projects/*/locations/*/tagTemplates/*}:setIamPolicy:\x01*ZJ"E/v1beta1/{resource=projects/*/locations/*/entryGroups/*}:setIamPolicy:\x01*ZT"O/v1beta1/{resource=projects/*/locations/*/entryGroups/*/entries/*}:setIamPolicy:\x01*\x12\xc0\x02\n\x0cGetIamPolicy\x12".google.iam.v1.GetIamPolicyRequest\x1a\x15.google.iam.v1.Policy"\xf4\x01\x82\xd3\xe4\x93\x02\xed\x01"F/v1beta1/{resource=projects/*/locations/*/tagTemplates/*}:getIamPolicy:\x01*ZJ"E/v1beta1/{resource=projects/*/locations/*/entryGroups/*}:getIamPolicy:\x01*ZT"O/v1beta1/{resource=projects/*/locations/*/entryGroups/*/entries/*}:getIamPolicy:\x01*\x12\xf2\x02\n\x12TestIamPermissions\x12(.google.iam.v1.TestIamPermissionsRequest\x1a).google.iam.v1.TestIamPermissionsResponse"\x86\x02\x82\xd3\xe4\x93\x02\xff\x01"L/v1beta1/{resource=projects/*/locations/*/tagTemplates/*}:testIamPermissions:\x01*ZP"K/v1beta1/{resource=projects/*/locations/*/entryGroups/*}:testIamPermissions:\x01*ZZ"U/v1beta1/{resource=projects/*/locations/*/entryGroups/*/entries/*}:testIamPermissions:\x01*\x1aN\xca\x41\x1a\x64\x61tacatalog.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformBp\n\x1c\x63om.google.cloud.datacatalogP\x01ZKgoogle.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1;datacatalog\xf8\x01\x01\x62\x06proto3' ), dependencies=[ 
google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, + google_dot_api_dot_resource__pb2.DESCRIPTOR, + google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_gcs__fileset__spec__pb2.DESCRIPTOR, google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_schema__pb2.DESCRIPTOR, google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_search__pb2.DESCRIPTOR, google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_table__spec__pb2.DESCRIPTOR, @@ -60,7 +69,6 @@ google_dot_iam_dot_v1_dot_policy__pb2.DESCRIPTOR, google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, - google_dot_api_dot_client__pb2.DESCRIPTOR, ], ) @@ -83,11 +91,14 @@ _descriptor.EnumValueDescriptor( name="DATA_STREAM", index=2, number=3, serialized_options=None, type=None ), + _descriptor.EnumValueDescriptor( + name="FILESET", index=3, number=4, serialized_options=None, type=None + ), ], containing_type=None, serialized_options=None, - serialized_start=2955, - serialized_end=3022, + serialized_start=4868, + serialized_end=4948, ) _sym_db.RegisterEnumDescriptor(_ENTRYTYPE) @@ -95,6 +106,7 @@ ENTRY_TYPE_UNSPECIFIED = 0 TABLE = 2 DATA_STREAM = 3 +FILESET = 4 _SEARCHCATALOGREQUEST_SCOPE = _descriptor.Descriptor( @@ -167,8 +179,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=717, - serialized_end=815, + serialized_start=855, + serialized_end=953, ) _SEARCHCATALOGREQUEST = _descriptor.Descriptor( @@ -193,7 +205,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -211,7 +223,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -247,7 +259,7 @@ containing_type=None, is_extension=False, 
extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -277,8 +289,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=544, - serialized_end=815, + serialized_start=667, + serialized_end=953, ) @@ -334,8 +346,262 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=817, - serialized_end=937, + serialized_start=955, + serialized_end=1075, +) + + +_CREATEENTRYGROUPREQUEST = _descriptor.Descriptor( + name="CreateEntryGroupRequest", + full_name="google.cloud.datacatalog.v1beta1.CreateEntryGroupRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="parent", + full_name="google.cloud.datacatalog.v1beta1.CreateEntryGroupRequest.parent", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b( + "\340A\002\372A'\022%datacatalog.googleapis.com/EntryGroup" + ), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="entry_group_id", + full_name="google.cloud.datacatalog.v1beta1.CreateEntryGroupRequest.entry_group_id", + index=1, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="entry_group", + full_name="google.cloud.datacatalog.v1beta1.CreateEntryGroupRequest.entry_group", + index=2, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + 
file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1078, + serialized_end=1262, +) + + +_GETENTRYGROUPREQUEST = _descriptor.Descriptor( + name="GetEntryGroupRequest", + full_name="google.cloud.datacatalog.v1beta1.GetEntryGroupRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.cloud.datacatalog.v1beta1.GetEntryGroupRequest.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b( + "\340A\002\372A'\n%datacatalog.googleapis.com/EntryGroup" + ), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="read_mask", + full_name="google.cloud.datacatalog.v1beta1.GetEntryGroupRequest.read_mask", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1265, + serialized_end=1395, +) + + +_DELETEENTRYGROUPREQUEST = _descriptor.Descriptor( + name="DeleteEntryGroupRequest", + full_name="google.cloud.datacatalog.v1beta1.DeleteEntryGroupRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.cloud.datacatalog.v1beta1.DeleteEntryGroupRequest.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + 
default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b( + "\340A\002\372A'\n%datacatalog.googleapis.com/EntryGroup" + ), + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1397, + serialized_end=1483, +) + + +_CREATEENTRYREQUEST = _descriptor.Descriptor( + name="CreateEntryRequest", + full_name="google.cloud.datacatalog.v1beta1.CreateEntryRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="parent", + full_name="google.cloud.datacatalog.v1beta1.CreateEntryRequest.parent", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b( + "\340A\002\372A'\n%datacatalog.googleapis.com/EntryGroup" + ), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="entry_id", + full_name="google.cloud.datacatalog.v1beta1.CreateEntryRequest.entry_id", + index=1, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="entry", + full_name="google.cloud.datacatalog.v1beta1.CreateEntryRequest.entry", + index=2, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + ], + 
extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1486, + serialized_end=1653, ) @@ -361,7 +627,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -391,8 +657,47 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=939, - serialized_end=1064, + serialized_start=1656, + serialized_end=1786, +) + + +_DELETEENTRYREQUEST = _descriptor.Descriptor( + name="DeleteEntryRequest", + full_name="google.cloud.datacatalog.v1beta1.DeleteEntryRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.cloud.datacatalog.v1beta1.DeleteEntryRequest.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b('\340A\002\372A"\n datacatalog.googleapis.com/Entry'), + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1788, + serialized_end=1864, ) @@ -418,7 +723,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b('\340A\002\372A"\n datacatalog.googleapis.com/Entry'), file=DESCRIPTOR, ) ], @@ -430,8 +735,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1066, - serialized_end=1097, + serialized_start=1866, + serialized_end=1939, ) @@ -495,8 +800,8 @@ fields=[], ) ], - serialized_start=1099, - serialized_end=1185, + serialized_start=1941, + serialized_end=2027, ) @@ -522,7 +827,7 @@ 
containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\372A'\n%datacatalog.googleapis.com/EntryGroup"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -540,7 +845,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -561,10 +866,28 @@ serialized_options=None, file=DESCRIPTOR, ), + _descriptor.FieldDescriptor( + name="gcs_fileset_spec", + full_name="google.cloud.datacatalog.v1beta1.Entry.gcs_fileset_spec", + index=3, + number=6, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), _descriptor.FieldDescriptor( name="bigquery_table_spec", full_name="google.cloud.datacatalog.v1beta1.Entry.bigquery_table_spec", - index=3, + index=4, number=12, type=11, cpp_type=10, @@ -582,7 +905,7 @@ _descriptor.FieldDescriptor( name="bigquery_date_sharded_spec", full_name="google.cloud.datacatalog.v1beta1.Entry.bigquery_date_sharded_spec", - index=4, + index=5, number=15, type=11, cpp_type=10, @@ -598,10 +921,120 @@ file=DESCRIPTOR, ), _descriptor.FieldDescriptor( - name="display_name", - full_name="google.cloud.datacatalog.v1beta1.Entry.display_name", - index=5, - number=3, + name="display_name", + full_name="google.cloud.datacatalog.v1beta1.Entry.display_name", + index=6, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="description", + full_name="google.cloud.datacatalog.v1beta1.Entry.description", + index=7, + number=4, + type=9, + 
cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="schema", + full_name="google.cloud.datacatalog.v1beta1.Entry.schema", + index=8, + number=5, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="source_system_timestamps", + full_name="google.cloud.datacatalog.v1beta1.Entry.source_system_timestamps", + index=9, + number=7, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\003"), + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=_b( + "\352Au\n datacatalog.googleapis.com/Entry\022Qprojects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}" + ), + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="entry_type", + full_name="google.cloud.datacatalog.v1beta1.Entry.entry_type", + index=0, + containing_type=None, + fields=[], + ), + _descriptor.OneofDescriptor( + name="type_spec", + full_name="google.cloud.datacatalog.v1beta1.Entry.type_spec", + index=1, + containing_type=None, + fields=[], + ), + ], + serialized_start=2030, + serialized_end=2786, +) + + +_ENTRYGROUP = _descriptor.Descriptor( + name="EntryGroup", + full_name="google.cloud.datacatalog.v1beta1.EntryGroup", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + 
full_name="google.cloud.datacatalog.v1beta1.EntryGroup.name", + index=0, + number=1, type=9, cpp_type=9, label=1, @@ -616,10 +1049,10 @@ file=DESCRIPTOR, ), _descriptor.FieldDescriptor( - name="description", - full_name="google.cloud.datacatalog.v1beta1.Entry.description", - index=6, - number=4, + name="display_name", + full_name="google.cloud.datacatalog.v1beta1.EntryGroup.display_name", + index=1, + number=2, type=9, cpp_type=9, label=1, @@ -634,15 +1067,15 @@ file=DESCRIPTOR, ), _descriptor.FieldDescriptor( - name="schema", - full_name="google.cloud.datacatalog.v1beta1.Entry.schema", - index=7, - number=5, - type=11, - cpp_type=10, + name="description", + full_name="google.cloud.datacatalog.v1beta1.EntryGroup.description", + index=2, + number=3, + type=9, + cpp_type=9, label=1, has_default_value=False, - default_value=None, + default_value=_b("").decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -652,10 +1085,10 @@ file=DESCRIPTOR, ), _descriptor.FieldDescriptor( - name="source_system_timestamps", - full_name="google.cloud.datacatalog.v1beta1.Entry.source_system_timestamps", - index=8, - number=7, + name="data_catalog_timestamps", + full_name="google.cloud.datacatalog.v1beta1.EntryGroup.data_catalog_timestamps", + index=3, + number=4, type=11, cpp_type=10, label=1, @@ -666,28 +1099,22 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - serialized_options=None, + serialized_options=_b( + "\352Aj\n%datacatalog.googleapis.com/EntryGroup\022Aprojects/{project}/locations/{location}/entryGroups/{entry_group}" + ), is_extendable=False, syntax="proto3", extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="type_spec", - full_name="google.cloud.datacatalog.v1beta1.Entry.type_spec", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=1188, - 
serialized_end=1674, + oneofs=[], + serialized_start=2789, + serialized_end=3059, ) @@ -713,7 +1140,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A(\022&datacatalog.googleapis.com/TagTemplate" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -731,7 +1160,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -749,7 +1178,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -761,8 +1190,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1677, - serialized_end=1813, + serialized_start=3062, + serialized_end=3256, ) @@ -788,7 +1217,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A(\n&datacatalog.googleapis.com/TagTemplate" + ), file=DESCRIPTOR, ) ], @@ -800,8 +1231,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1815, - serialized_end=1852, + serialized_start=3258, + serialized_end=3343, ) @@ -827,7 +1258,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -857,8 +1288,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1855, - serialized_end=1999, + serialized_start=3346, + serialized_end=3495, ) @@ -884,7 +1315,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A(\n&datacatalog.googleapis.com/TagTemplate" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -902,7 +1335,7 @@ containing_type=None, is_extension=False, extension_scope=None, - 
serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -914,8 +1347,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2001, - serialized_end=2056, + serialized_start=3497, + serialized_end=3605, ) @@ -941,7 +1374,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A \n\036datacatalog.googleapis.com/Tag" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -959,7 +1394,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -971,8 +1406,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2058, - serialized_end=2144, + serialized_start=3608, + serialized_end=3739, ) @@ -998,7 +1433,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1028,8 +1463,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2146, - serialized_end=2265, + serialized_start=3741, + serialized_end=3865, ) @@ -1055,7 +1490,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A \022\036datacatalog.googleapis.com/Tag" + ), file=DESCRIPTOR, ) ], @@ -1067,8 +1504,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2267, - serialized_end=2299, + serialized_start=3867, + serialized_end=3939, ) @@ -1094,7 +1531,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A(\n&datacatalog.googleapis.com/TagTemplate" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1112,7 +1551,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + 
serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1130,7 +1569,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -1142,8 +1581,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2302, - serialized_end=2460, + serialized_start=3942, + serialized_end=4158, ) @@ -1169,7 +1608,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A-\n+datacatalog.googleapis.com/TagTemplateField" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1187,7 +1628,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1217,8 +1658,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2463, - serialized_end=2637, + serialized_start=4161, + serialized_end=4393, ) @@ -1244,7 +1685,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A-\n+datacatalog.googleapis.com/TagTemplateField" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1262,7 +1705,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -1274,8 +1717,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2639, - serialized_end=2719, + serialized_start=4396, + serialized_end=4534, ) @@ -1301,7 +1744,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A-\n+datacatalog.googleapis.com/TagTemplateField" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1319,7 +1764,7 @@ containing_type=None, is_extension=False, extension_scope=None, 
- serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -1331,8 +1776,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2721, - serialized_end=2781, + serialized_start=4536, + serialized_end=4654, ) @@ -1358,7 +1803,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A \022\036datacatalog.googleapis.com/Tag" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1406,8 +1853,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2783, - serialized_end=2855, + serialized_start=4656, + serialized_end=4768, ) @@ -1463,8 +1910,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2857, - serialized_end=2953, + serialized_start=4770, + serialized_end=4866, ) _SEARCHCATALOGREQUEST_SCOPE.containing_type = _SEARCHCATALOGREQUEST @@ -1474,6 +1921,11 @@ ].message_type = ( google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_search__pb2._SEARCHCATALOGRESULT ) +_CREATEENTRYGROUPREQUEST.fields_by_name["entry_group"].message_type = _ENTRYGROUP +_GETENTRYGROUPREQUEST.fields_by_name[ + "read_mask" +].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK +_CREATEENTRYREQUEST.fields_by_name["entry"].message_type = _ENTRY _UPDATEENTRYREQUEST.fields_by_name["entry"].message_type = _ENTRY _UPDATEENTRYREQUEST.fields_by_name[ "update_mask" @@ -1491,6 +1943,11 @@ "sql_resource" ].containing_oneof = _LOOKUPENTRYREQUEST.oneofs_by_name["target_name"] _ENTRY.fields_by_name["type"].enum_type = _ENTRYTYPE +_ENTRY.fields_by_name[ + "gcs_fileset_spec" +].message_type = ( + google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_gcs__fileset__spec__pb2._GCSFILESETSPEC +) _ENTRY.fields_by_name[ "bigquery_table_spec" ].message_type = ( @@ -1511,6 +1968,14 @@ ].message_type = ( google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_timestamps__pb2._SYSTEMTIMESTAMPS ) 
+_ENTRY.oneofs_by_name["entry_type"].fields.append(_ENTRY.fields_by_name["type"]) +_ENTRY.fields_by_name["type"].containing_oneof = _ENTRY.oneofs_by_name["entry_type"] +_ENTRY.oneofs_by_name["type_spec"].fields.append( + _ENTRY.fields_by_name["gcs_fileset_spec"] +) +_ENTRY.fields_by_name["gcs_fileset_spec"].containing_oneof = _ENTRY.oneofs_by_name[ + "type_spec" +] _ENTRY.oneofs_by_name["type_spec"].fields.append( _ENTRY.fields_by_name["bigquery_table_spec"] ) @@ -1523,6 +1988,11 @@ _ENTRY.fields_by_name[ "bigquery_date_sharded_spec" ].containing_oneof = _ENTRY.oneofs_by_name["type_spec"] +_ENTRYGROUP.fields_by_name[ + "data_catalog_timestamps" +].message_type = ( + google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_timestamps__pb2._SYSTEMTIMESTAMPS +) _CREATETAGTEMPLATEREQUEST.fields_by_name[ "tag_template" ].message_type = ( @@ -1563,10 +2033,16 @@ ].message_type = google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_tags__pb2._TAG DESCRIPTOR.message_types_by_name["SearchCatalogRequest"] = _SEARCHCATALOGREQUEST DESCRIPTOR.message_types_by_name["SearchCatalogResponse"] = _SEARCHCATALOGRESPONSE +DESCRIPTOR.message_types_by_name["CreateEntryGroupRequest"] = _CREATEENTRYGROUPREQUEST +DESCRIPTOR.message_types_by_name["GetEntryGroupRequest"] = _GETENTRYGROUPREQUEST +DESCRIPTOR.message_types_by_name["DeleteEntryGroupRequest"] = _DELETEENTRYGROUPREQUEST +DESCRIPTOR.message_types_by_name["CreateEntryRequest"] = _CREATEENTRYREQUEST DESCRIPTOR.message_types_by_name["UpdateEntryRequest"] = _UPDATEENTRYREQUEST +DESCRIPTOR.message_types_by_name["DeleteEntryRequest"] = _DELETEENTRYREQUEST DESCRIPTOR.message_types_by_name["GetEntryRequest"] = _GETENTRYREQUEST DESCRIPTOR.message_types_by_name["LookupEntryRequest"] = _LOOKUPENTRYREQUEST DESCRIPTOR.message_types_by_name["Entry"] = _ENTRY +DESCRIPTOR.message_types_by_name["EntryGroup"] = _ENTRYGROUP DESCRIPTOR.message_types_by_name["CreateTagTemplateRequest"] = _CREATETAGTEMPLATEREQUEST 
DESCRIPTOR.message_types_by_name["GetTagTemplateRequest"] = _GETTAGTEMPLATEREQUEST DESCRIPTOR.message_types_by_name["UpdateTagTemplateRequest"] = _UPDATETAGTEMPLATEREQUEST @@ -1606,21 +2082,21 @@ include_org_ids: Data Catalog tries to automatically choose the right corpus of data to search through. You can ensure an organization is - included by adding it to "include\_org\_ids". You can ensure a - project's org is included with "include\_project\_ids". You + included by adding it to ``include_org_ids``. You can ensure a + project's org is included with ``include_project_ids``. You must specify at least one organization using - "include\_org\_ids" or "include\_project\_ids" in all search + ``include_org_ids`` or ``include_project_ids`` in all search requests. List of organization IDs to search within. To find your organization ID, follow instructions in https://cloud.google.com/resource-manager/docs/creating- - managing-organization + managing-organization. include_project_ids: List of project IDs to search within. To learn more about the distinction between project names/IDs/numbers, go to - https://cloud.google.com/docs/overview/#projects + https://cloud.google.com/docs/overview/#projects. include_gcp_public_datasets: - If true, include Google Cloud Platform (GCP) public datasets - in the search results. Info on GCP public datasets is + If ``true``, include Google Cloud Platform (GCP) public + datasets in the search results. Info on GCP public datasets is available at https://cloud.google.com/public-datasets/. By default, GCP public datasets are excluded. """, @@ -1649,22 +2125,20 @@ 10. Max limit for page\_size is 1000. Throws an invalid argument for page\_size > 1000. page_token: - Optional pagination token returned in an earlier [SearchCatalo - gResponse.next\_page\_token][google.cloud.datacatalog.v1beta1. 
- DataCatalog.SearchCatalogResponse.next\_page\_token]; - indicates that this is a continuation of a prior [SearchCatalo - g][google.cloud.datacatalog.v1beta1.DataCatalog.SearchCatalog] + Optional. Pagination token returned in an earlier [SearchCatal + ogResponse.next\_page\_token][google.cloud.datacatalog.v1beta1 + .SearchCatalogResponse.next\_page\_token], which indicates + that this is a continuation of a prior [SearchCatalogRequest][ + google.cloud.datacatalog.v1beta1.DataCatalog.SearchCatalog] call, and that the system should return the next page of data. - If empty then the first page is returned. + If empty, the first page is returned. order_by: Specifies the ordering of results, currently supported case- - sensitive choices are: .. raw:: html
    .. raw:: html -
  • relevance .. raw:: html
  • .. raw:: html -
  • last\_access\_timestamp [asc\|desc], defaults to - descending if not specified, .. raw:: html
  • .. - raw:: html
  • last\_modified\_timestamp [asc\|desc], - defaults to descending if not specified. .. raw:: html -
  • .. raw:: html
+ sensitive choices are: - ``relevance``, only supports + desecending - ``last_access_timestamp [asc|desc]``, defaults + to descending if not specified - ``last_modified_timestamp + [asc|desc]``, defaults to descending if not specified If + not specified, defaults to ``relevance`` descending. """, # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.SearchCatalogRequest) ), @@ -1684,7 +2158,7 @@ Attributes: results: - Search results in descending order of relevance. + Search results. next_page_token: The token that can be used to retrieve the next page of results. @@ -1694,6 +2168,104 @@ ) _sym_db.RegisterMessage(SearchCatalogResponse) +CreateEntryGroupRequest = _reflection.GeneratedProtocolMessageType( + "CreateEntryGroupRequest", + (_message.Message,), + dict( + DESCRIPTOR=_CREATEENTRYGROUPREQUEST, + __module__="google.cloud.datacatalog_v1beta1.proto.datacatalog_pb2", + __doc__="""Request message for + [CreateEntryGroup][google.cloud.datacatalog.v1beta1.DataCatalog.CreateEntryGroup]. + + + Attributes: + parent: + Required. The name of the project this entry group is in. + Example: - projects/{project\_id}/locations/{location} Note + that this EntryGroup and its child resources may not actually + be stored in the location in this name. + entry_group_id: + Required. The id of the entry group to create. + entry_group: + The entry group to create. Defaults to an empty entry group. + """, + # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.CreateEntryGroupRequest) + ), +) +_sym_db.RegisterMessage(CreateEntryGroupRequest) + +GetEntryGroupRequest = _reflection.GeneratedProtocolMessageType( + "GetEntryGroupRequest", + (_message.Message,), + dict( + DESCRIPTOR=_GETENTRYGROUPREQUEST, + __module__="google.cloud.datacatalog_v1beta1.proto.datacatalog_pb2", + __doc__="""Request message for + [GetEntryGroup][google.cloud.datacatalog.v1beta1.DataCatalog.GetEntryGroup]. + + + Attributes: + name: + Required. The name of the entry group. 
For example, ``projects + /{project_id}/locations/{location}/entryGroups/{entry_group_id + }``. + read_mask: + The fields to return. If not set or empty, all fields are + returned. + """, + # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.GetEntryGroupRequest) + ), +) +_sym_db.RegisterMessage(GetEntryGroupRequest) + +DeleteEntryGroupRequest = _reflection.GeneratedProtocolMessageType( + "DeleteEntryGroupRequest", + (_message.Message,), + dict( + DESCRIPTOR=_DELETEENTRYGROUPREQUEST, + __module__="google.cloud.datacatalog_v1beta1.proto.datacatalog_pb2", + __doc__="""Request message for + [DeleteEntryGroup][google.cloud.datacatalog.v1beta1.DataCatalog.DeleteEntryGroup]. + + + Attributes: + name: + Required. The name of the entry group. For example, ``projects + /{project_id}/locations/{location}/entryGroups/{entry_group_id + }``. + """, + # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.DeleteEntryGroupRequest) + ), +) +_sym_db.RegisterMessage(DeleteEntryGroupRequest) + +CreateEntryRequest = _reflection.GeneratedProtocolMessageType( + "CreateEntryRequest", + (_message.Message,), + dict( + DESCRIPTOR=_CREATEENTRYREQUEST, + __module__="google.cloud.datacatalog_v1beta1.proto.datacatalog_pb2", + __doc__="""Request message for + [CreateEntry][google.cloud.datacatalog.v1beta1.DataCatalog.CreateEntry]. + + + Attributes: + parent: + Required. The name of the entry group this entry is in. + Example: - projects/{project\_id}/locations/{location}/entry + Groups/{entry\_group\_id} Note that this Entry and its child + resources may not actually be stored in the location in this + name. + entry_id: + Required. The id of the entry to create. + entry: + Required. The entry to create. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.CreateEntryRequest) + ), +) +_sym_db.RegisterMessage(CreateEntryRequest) + UpdateEntryRequest = _reflection.GeneratedProtocolMessageType( "UpdateEntryRequest", (_message.Message,), @@ -1706,19 +2278,41 @@ Attributes: entry: - Required. The updated Entry. + Required. The updated entry. The "name" field must be set. update_mask: - Optional. The fields to update on the entry. If absent or - empty, all modifiable fields are updated. Modifiable fields - in synced entries: 1. schema (Pub/Sub topics only) - Modifiable fields in native entries: 1. display\_name 2. - description 3. schema + The fields to update on the entry. If absent or empty, all + modifiable fields are updated. The following fields are + modifiable: \* For entries with type ``DATA_STREAM``: \* + ``schema`` \* For entries with type ``FILESET`` \* ``schema`` + \* ``display_name`` \* ``description`` \* ``gcs_fileset_spec`` + \* ``gcs_fileset_spec.file_patterns`` """, # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.UpdateEntryRequest) ), ) _sym_db.RegisterMessage(UpdateEntryRequest) +DeleteEntryRequest = _reflection.GeneratedProtocolMessageType( + "DeleteEntryRequest", + (_message.Message,), + dict( + DESCRIPTOR=_DELETEENTRYREQUEST, + __module__="google.cloud.datacatalog_v1beta1.proto.datacatalog_pb2", + __doc__="""Request message for + [DeleteEntry][google.cloud.datacatalog.v1beta1.DataCatalog.DeleteEntry]. + + + Attributes: + name: + Required. The name of the entry. Example: - projects/{projec + t\_id}/locations/{location}/entryGroups/{entry\_group\_id}/ent + ries/{entry\_id} + """, + # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.DeleteEntryRequest) + ), +) +_sym_db.RegisterMessage(DeleteEntryRequest) + GetEntryRequest = _reflection.GeneratedProtocolMessageType( "GetEntryRequest", (_message.Message,), @@ -1731,9 +2325,13 @@ Attributes: name: - Required. The name of the entry. 
For example, "projects/{proje - ct\_id}/locations/{location}/entryGroups/{entry\_group\_id}/en - tries/{entry\_id}". + Required. The name of the entry. Example: - projects/{projec + t\_id}/locations/{location}/entryGroups/{entry\_group\_id}/ent + ries/{entry\_id} Entry groups are logical groupings of + entries. Currently, users cannot create/modify entry groups. + They are created by Data Catalog; they include ``@bigquery`` + for all BigQuery entries, and ``@pubsub`` for all Cloud + Pub/Sub entries. """, # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.GetEntryRequest) ), @@ -1757,23 +2355,20 @@ linked_resource: The full name of the Google Cloud Platform resource the Data Catalog entry represents. See: https://cloud.google.com/apis/d - esign/resource\_names#full\_resource\_name Full names are - case-sensitive. Examples: "//bigquery.googleapis.com/projects - /projectId/datasets/datasetId/tables/tableId". - "//pubsub.googleapis.com/projects/projectId/topics/topicId" + esign/resource\_names#full\_resource\_name. Full names are + case-sensitive. Examples: - //bigquery.googleapis.com/proje + cts/projectId/datasets/datasetId/tables/tableId - + //pubsub.googleapis.com/projects/projectId/topics/topicId sql_resource: The SQL name of the entry. SQL names are case-sensitive. - Examples: .. raw:: html
    .. raw:: html
  • - cloud\_pubsub.project\_id.topic\_id .. raw:: html
  • - .. raw:: html
  • pubsub.project\_id.\ - ``topic.id.with.dots`` .. raw:: html
  • .. raw:: html -
  • bigquery.project\_id.dataset\_id.table\_id .. raw:: - html
  • .. raw:: html
  • datacatalog.project\_i - d.location\_id.entry\_group\_id.entry\_id .. raw:: html -
  • .. raw:: html
\*\_ids shoud satisfy the - standard SQL rules for identifiers. + Examples: - ``cloud_pubsub.project_id.topic_id`` - + ``pubsub.project_id.`topic.id.with.dots``` - + ``bigquery.project_id.dataset_id.table_id`` - + ``datacatalog.project_id.location_id.entry_group_id.entry_id`` + ``*_id``\ s shoud satisfy the standard SQL rules for + identifiers. https://cloud.google.com/bigquery/docs/reference/standard- - sql/lexical + sql/lexical. """, # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.LookupEntryRequest) ), @@ -1787,9 +2382,9 @@ DESCRIPTOR=_ENTRY, __module__="google.cloud.datacatalog_v1beta1.proto.datacatalog_pb2", __doc__="""Entry Metadata. A Data Catalog Entry resource represents another - resource in Google Cloud Platform, such as a BigQuery Dataset or a - Pub/Sub Topic. Clients can use the ``linked_resource`` field in the - Entry resource to refer to the original resource id of the source + resource in Google Cloud Platform, such as a BigQuery dataset or a Cloud + Pub/Sub topic. Clients can use the ``linked_resource`` field in the + Entry resource to refer to the original resource ID of the source system. An Entry resource contains resource details, such as its schema. An @@ -1799,54 +2394,90 @@ Attributes: name: - Required when used in [UpdateEntryRequest][google.cloud.dataca - talog.v1beta1.UpdateEntryRequest]. The Data Catalog resource - name of the entry in URL format. For example, "projects/{proje - ct\_id}/locations/{location}/entryGroups/{entry\_group\_id}/en - tries/{entry\_id}". Note that this Entry and its child - resources may not actually be stored in the location in this - name. + The Data Catalog resource name of the entry in URL format. + Example: - projects/{project\_id}/locations/{location}/entry + Groups/{entry\_group\_id}/entries/{entry\_id} Note that this + Entry and its child resources may not actually be stored in + the location in this name. linked_resource: - Output only. 
The full name of the cloud resource the entry - belongs to. See: https://cloud.google.com/apis/design/resource - \_names#full\_resource\_name Data Catalog supports resources - from select Google Cloud Platform systems. ``linked_resource`` - is the full name of the Google Cloud Platform resource. For - example, the ``linked_resource`` for a table resource from - BigQuery is: "//bigquery.googleapis.com/projects/projectId/da - tasets/datasetId/tables/tableId". + Output only. The resource this metadata entry refers to. For + Google Cloud Platform resources, ``linked_resource`` is the + `full name of the resource `__. For example, the + ``linked_resource`` for a table resource from BigQuery is: - + //bigquery.googleapis.com/projects/projectId/datasets/datasetI + d/tables/tableId + entry_type: + Required. Entry type. type: - Required. Type of entry. + The type of the entry. type_spec: - Optional. Type specification information. + Type specification information. + gcs_fileset_spec: + Specification that applies to a Cloud Storage fileset. This is + only valid on entries of type FILESET. bigquery_table_spec: Specification that applies to a BigQuery table. This is only - valid on entries of type TABLE. + valid on entries of type ``TABLE``. bigquery_date_sharded_spec: Specification for a group of BigQuery tables with name pattern - [prefix]YYYYMMDD. Context: + ``[prefix]YYYYMMDD``. Context: https://cloud.google.com/bigquery/docs/partitioned- - tables#partitioning\_versus\_sharding + tables#partitioning\_versus\_sharding. display_name: - Optional. Display information such as title and description. A - short name to identify the entry, for example, "Analytics Data - - Jan 2011". Default value is an empty string. + Display information such as title and description. A short + name to identify the entry, for example, "Analytics Data - Jan + 2011". Default value is an empty string. description: - Optional. 
Entry description, which can consist of several - sentences or paragraphs that describe entry contents. Default - value is an empty string. + Entry description, which can consist of several sentences or + paragraphs that describe entry contents. Default value is an + empty string. schema: - Optional. Schema of the entry. An entry might not have any - schema attached to it. + Schema of the entry. An entry might not have any schema + attached to it. source_system_timestamps: Output only. Timestamps about the underlying Google Cloud - Platform resource -- not about this Data Catalog Entry. + Platform resource, not about this Data Catalog Entry. """, # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.Entry) ), ) _sym_db.RegisterMessage(Entry) +EntryGroup = _reflection.GeneratedProtocolMessageType( + "EntryGroup", + (_message.Message,), + dict( + DESCRIPTOR=_ENTRYGROUP, + __module__="google.cloud.datacatalog_v1beta1.proto.datacatalog_pb2", + __doc__="""EntryGroup Metadata. An EntryGroup resource represents a logical + grouping of zero or more Data Catalog + [Entry][google.cloud.datacatalog.v1beta1.Entry] resources. + + + Attributes: + name: + The resource name of the entry group in URL format. Example: + - projects/{project\_id}/locations/{location}/entryGroups/{en + try\_group\_id} Note that this EntryGroup and its child + resources may not actually be stored in the location in this + name. + display_name: + A short name to identify the entry group, for example, + "analytics data - jan 2011". Default value is an empty string. + description: + Entry group description, which can consist of several + sentences or paragraphs that describe entry group contents. + Default value is an empty string. + data_catalog_timestamps: + Output only. Timestamps about this EntryGroup. Default value + is empty timestamps. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.EntryGroup) + ), +) +_sym_db.RegisterMessage(EntryGroup) + CreateTagTemplateRequest = _reflection.GeneratedProtocolMessageType( "CreateTagTemplateRequest", (_message.Message,), @@ -1860,10 +2491,10 @@ Attributes: parent: Required. The name of the project and the location this - template is in. Example: - "projects/{project\_id}/locations/{location}". Note that this - TagTemplate and its child resources may not actually be stored - in the location in this name. + template is in. Example: - + projects/{project\_id}/locations/{location} TagTemplate and + its child resources may not actually be stored in the location + in this name. tag_template_id: Required. The id of the tag template to create. tag_template: @@ -1886,9 +2517,9 @@ Attributes: name: - Required. The name of the tag template. For example, "projects - /{project\_id}/locations/{location}/tagTemplates/{tag\_templat - e\_id}". + Required. The name of the tag template. Example: - projects/ + {project\_id}/locations/{location}/tagTemplates/{tag\_template + \_id} """, # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.GetTagTemplateRequest) ), @@ -1907,12 +2538,12 @@ Attributes: tag_template: - Required. The template to update. + Required. The template to update. The "name" field must be + set. update_mask: - Optional. The field mask specifies the parts of the template - to overwrite. Allowed fields: - display\_name If - update\_mask is omitted, all of the allowed fields above will - be updated. + The field mask specifies the parts of the template to + overwrite. Allowed fields: - ``display_name`` If absent or + empty, all of the allowed fields above will be updated. """, # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.UpdateTagTemplateRequest) ), @@ -1931,13 +2562,14 @@ Attributes: name: - Required. The name of the tag template to delete. 
For example, - "projects/{project\_id}/locations/{location}/tagTemplates/{tag - \_template\_id}". + Required. The name of the tag template to delete. Example: - + projects/{project\_id}/locations/{location}/tagTemplates/{tag\ + _template\_id} force: - Required. Currently, this field must always be set to true. - This confirms the deletion of any possible tags using this - template. force = false will be supported in the future. + Required. Currently, this field must always be set to + ``true``. This confirms the deletion of any possible tags + using this template. ``force = false`` will be supported in + the future. """, # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.DeleteTagTemplateRequest) ), @@ -1957,9 +2589,9 @@ Attributes: parent: Required. The name of the resource to attach this tag to. Tags - can be attached to Entries. (example: "projects/{project\_id}/ - locations/{location}/entryGroups/{entry\_group\_id}/entries/{e - ntry\_id}"). Note that this Tag and its child resources may + can be attached to Entries. Example: - projects/{project\_id + }/locations/{location}/entryGroups/{entry\_group\_id}/entries/ + {entry\_id} Note that this Tag and its child resources may not actually be stored in the location in this name. tag: Required. The tag to create. @@ -1981,11 +2613,11 @@ Attributes: tag: - Required. The updated tag. + Required. The updated tag. The "name" field must be set. update_mask: - Optional. The fields to update on the Tag. If absent or empty, - all modifiable fields are updated. Currently the only - modifiable field is the field ``fields``. + The fields to update on the Tag. If absent or empty, all + modifiable fields are updated. Currently the only modifiable + field is the field ``fields``. """, # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.UpdateTagRequest) ), @@ -2004,9 +2636,9 @@ Attributes: name: - Required. The name of the tag to delete. 
For example, "project - s/{project\_id}/locations/{location}/entryGroups/{entry\_group - \_id}/entries/{entry\_id}/tags/{tag\_id}". + Required. The name of the tag to delete. Example: - projects + /{project\_id}/locations/{location}/entryGroups/{entry\_group\ + _id}/entries/{entry\_id}/tags/{tag\_id} """, # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.DeleteTagRequest) ), @@ -2026,15 +2658,15 @@ Attributes: parent: Required. The name of the project this template is in. - Example: "projects/{project\_id}/locations/{location}/tagTempl - ates/{tag\_template\_id}". Note that this TagTemplateField may - not actually be stored in the location in this name. + Example: - projects/{project\_id}/locations/{location}/tagTe + mplates/{tag\_template\_id} Note that this TagTemplateField + may not actually be stored in the location in this name. tag_template_field_id: - Required. The id of the tag template field to create. Field + Required. The ID of the tag template field to create. Field ids can contain letters (both uppercase and lowercase), - numbers (0-9), underscores (\_) and dashes (-). Field ids must + numbers (0-9), underscores (\_) and dashes (-). Field IDs must be at least 1 character long and at most 128 characters long. - Field ids must also be unique to their template. + Field IDs must also be unique within their template. tag_template_field: Required. The tag template field to create. """, @@ -2055,19 +2687,19 @@ Attributes: name: - Required. The name of the tag template field. For example, "pr - ojects/{project\_id}/locations/{location}/tagTemplates/{tag\_t - emplate\_id}/fields/{tag\_template\_field\_id}". + Required. The name of the tag template field. Example: - pro + jects/{project\_id}/locations/{location}/tagTemplates/{tag\_te + mplate\_id}/fields/{tag\_template\_field\_id} tag_template_field: Required. The template to update. update_mask: - Optional. The field mask specifies the parts of the template - to overwrite. 
Allowed fields: - display\_name - - type.enum\_type If update\_mask is omitted, all of the - allowed fields above will be updated. When updating an enum - type, the provided values will be merged with the existing - values. Therefore, enum values can only be added, existing - enum values cannot be deleted nor renamed. + The field mask specifies the parts of the template to be + updated. Allowed fields: - ``display_name`` - + ``type.enum_type`` If ``update_mask`` is not set or empty, + all of the allowed fields above will be updated. When + updating an enum type, the provided values will be merged with + the existing values. Therefore, enum values can only be added, + existing enum values cannot be deleted nor renamed. """, # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.UpdateTagTemplateFieldRequest) ), @@ -2086,12 +2718,12 @@ Attributes: name: - Required. The name of the tag template. For example, "projects - /{project\_id}/locations/{location}/tagTemplates/{tag\_templat - e\_id}/fields/{tag\_template\_field\_id}". + Required. The name of the tag template. Example: - projects/ + {project\_id}/locations/{location}/tagTemplates/{tag\_template + \_id}/fields/{tag\_template\_field\_id} new_tag_template_field_id: Required. The new ID of this tag template field. For example, - "my\_new\_field". + ``my_new_field``. """, # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.RenameTagTemplateFieldRequest) ), @@ -2110,13 +2742,14 @@ Attributes: name: - Required. The name of the tag template field to delete. For - example, "projects/{project\_id}/locations/{location}/tagTempl - ates/{tag\_template\_id}/fields/{tag\_template\_field\_id}". + Required. The name of the tag template field to delete. + Example: - projects/{project\_id}/locations/{location}/tagTe + mplates/{tag\_template\_id}/fields/{tag\_template\_field\_id} force: - Required. Currently, this field must always be set to true. 
- This confirms the deletion of this field from any tags using - this field. force = false will be supported in the future. + Required. Currently, this field must always be set to + ``true``. This confirms the deletion of this field from any + tags using this field. ``force = false`` will be supported in + the future. """, # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.DeleteTagTemplateFieldRequest) ), @@ -2139,11 +2772,11 @@ tags of. The resource could be an [Entry][google.cloud.datacatalog.v1beta1.Entry]. page_size: - Optional. The maximum number of tags to return. Default is 10. - Max limit is 1000. + The maximum number of tags to return. Default is 10. Max limit + is 1000. page_token: - Optional. Token that specifies which page is requested. If - empty, the first page is returned. + Token that specifies which page is requested. If empty, the + first page is returned. """, # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.ListTagsRequest) ), @@ -2174,6 +2807,48 @@ DESCRIPTOR._options = None +_SEARCHCATALOGREQUEST.fields_by_name["scope"]._options = None +_SEARCHCATALOGREQUEST.fields_by_name["query"]._options = None +_SEARCHCATALOGREQUEST.fields_by_name["page_token"]._options = None +_CREATEENTRYGROUPREQUEST.fields_by_name["parent"]._options = None +_CREATEENTRYGROUPREQUEST.fields_by_name["entry_group_id"]._options = None +_GETENTRYGROUPREQUEST.fields_by_name["name"]._options = None +_DELETEENTRYGROUPREQUEST.fields_by_name["name"]._options = None +_CREATEENTRYREQUEST.fields_by_name["parent"]._options = None +_CREATEENTRYREQUEST.fields_by_name["entry_id"]._options = None +_CREATEENTRYREQUEST.fields_by_name["entry"]._options = None +_UPDATEENTRYREQUEST.fields_by_name["entry"]._options = None +_DELETEENTRYREQUEST.fields_by_name["name"]._options = None +_GETENTRYREQUEST.fields_by_name["name"]._options = None +_ENTRY.fields_by_name["name"]._options = None +_ENTRY.fields_by_name["linked_resource"]._options = None 
+_ENTRY.fields_by_name["source_system_timestamps"]._options = None +_ENTRY._options = None +_ENTRYGROUP.fields_by_name["data_catalog_timestamps"]._options = None +_ENTRYGROUP._options = None +_CREATETAGTEMPLATEREQUEST.fields_by_name["parent"]._options = None +_CREATETAGTEMPLATEREQUEST.fields_by_name["tag_template_id"]._options = None +_CREATETAGTEMPLATEREQUEST.fields_by_name["tag_template"]._options = None +_GETTAGTEMPLATEREQUEST.fields_by_name["name"]._options = None +_UPDATETAGTEMPLATEREQUEST.fields_by_name["tag_template"]._options = None +_DELETETAGTEMPLATEREQUEST.fields_by_name["name"]._options = None +_DELETETAGTEMPLATEREQUEST.fields_by_name["force"]._options = None +_CREATETAGREQUEST.fields_by_name["parent"]._options = None +_CREATETAGREQUEST.fields_by_name["tag"]._options = None +_UPDATETAGREQUEST.fields_by_name["tag"]._options = None +_DELETETAGREQUEST.fields_by_name["name"]._options = None +_CREATETAGTEMPLATEFIELDREQUEST.fields_by_name["parent"]._options = None +_CREATETAGTEMPLATEFIELDREQUEST.fields_by_name["tag_template_field_id"]._options = None +_CREATETAGTEMPLATEFIELDREQUEST.fields_by_name["tag_template_field"]._options = None +_UPDATETAGTEMPLATEFIELDREQUEST.fields_by_name["name"]._options = None +_UPDATETAGTEMPLATEFIELDREQUEST.fields_by_name["tag_template_field"]._options = None +_RENAMETAGTEMPLATEFIELDREQUEST.fields_by_name["name"]._options = None +_RENAMETAGTEMPLATEFIELDREQUEST.fields_by_name[ + "new_tag_template_field_id" +]._options = None +_DELETETAGTEMPLATEFIELDREQUEST.fields_by_name["name"]._options = None +_DELETETAGTEMPLATEFIELDREQUEST.fields_by_name["force"]._options = None +_LISTTAGSREQUEST.fields_by_name["parent"]._options = None _DATACATALOG = _descriptor.ServiceDescriptor( name="DataCatalog", @@ -2183,8 +2858,8 @@ serialized_options=_b( "\312A\032datacatalog.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" ), - serialized_start=3025, - serialized_end=6729, + serialized_start=4951, + serialized_end=10527, methods=[ 
_descriptor.MethodDescriptor( name="SearchCatalog", @@ -2194,35 +2869,90 @@ input_type=_SEARCHCATALOGREQUEST, output_type=_SEARCHCATALOGRESPONSE, serialized_options=_b( - '\202\323\344\223\002\034"\027/v1beta1/catalog:search:\001*' + '\202\323\344\223\002\034"\027/v1beta1/catalog:search:\001*\332A\013scope,query' + ), + ), + _descriptor.MethodDescriptor( + name="CreateEntryGroup", + full_name="google.cloud.datacatalog.v1beta1.DataCatalog.CreateEntryGroup", + index=1, + containing_service=None, + input_type=_CREATEENTRYGROUPREQUEST, + output_type=_ENTRYGROUP, + serialized_options=_b( + '\202\323\344\223\002C"4/v1beta1/{parent=projects/*/locations/*}/entryGroups:\013entry_group\332A!parent,entry_group_id,entry_group' + ), + ), + _descriptor.MethodDescriptor( + name="GetEntryGroup", + full_name="google.cloud.datacatalog.v1beta1.DataCatalog.GetEntryGroup", + index=2, + containing_service=None, + input_type=_GETENTRYGROUPREQUEST, + output_type=_ENTRYGROUP, + serialized_options=_b( + "\202\323\344\223\0026\0224/v1beta1/{name=projects/*/locations/*/entryGroups/*}\332A\004name\332A\016name,read_mask" + ), + ), + _descriptor.MethodDescriptor( + name="DeleteEntryGroup", + full_name="google.cloud.datacatalog.v1beta1.DataCatalog.DeleteEntryGroup", + index=3, + containing_service=None, + input_type=_DELETEENTRYGROUPREQUEST, + output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, + serialized_options=_b( + "\202\323\344\223\0026*4/v1beta1/{name=projects/*/locations/*/entryGroups/*}\332A\004name" + ), + ), + _descriptor.MethodDescriptor( + name="CreateEntry", + full_name="google.cloud.datacatalog.v1beta1.DataCatalog.CreateEntry", + index=4, + containing_service=None, + input_type=_CREATEENTRYREQUEST, + output_type=_ENTRY, + serialized_options=_b( + '\202\323\344\223\002G">/v1beta1/{parent=projects/*/locations/*/entryGroups/*}/entries:\005entry\332A\025parent,entry_id,entry' ), ), _descriptor.MethodDescriptor( name="UpdateEntry", 
full_name="google.cloud.datacatalog.v1beta1.DataCatalog.UpdateEntry", - index=1, + index=5, containing_service=None, input_type=_UPDATEENTRYREQUEST, output_type=_ENTRY, serialized_options=_b( - "\202\323\344\223\002M2D/v1beta1/{entry.name=projects/*/locations/*/entryGroups/*/entries/*}:\005entry" + "\202\323\344\223\002M2D/v1beta1/{entry.name=projects/*/locations/*/entryGroups/*/entries/*}:\005entry\332A\005entry\332A\021entry,update_mask" + ), + ), + _descriptor.MethodDescriptor( + name="DeleteEntry", + full_name="google.cloud.datacatalog.v1beta1.DataCatalog.DeleteEntry", + index=6, + containing_service=None, + input_type=_DELETEENTRYREQUEST, + output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, + serialized_options=_b( + "\202\323\344\223\002@*>/v1beta1/{name=projects/*/locations/*/entryGroups/*/entries/*}\332A\004name" ), ), _descriptor.MethodDescriptor( name="GetEntry", full_name="google.cloud.datacatalog.v1beta1.DataCatalog.GetEntry", - index=2, + index=7, containing_service=None, input_type=_GETENTRYREQUEST, output_type=_ENTRY, serialized_options=_b( - "\202\323\344\223\002@\022>/v1beta1/{name=projects/*/locations/*/entryGroups/*/entries/*}" + "\202\323\344\223\002@\022>/v1beta1/{name=projects/*/locations/*/entryGroups/*/entries/*}\332A\004name" ), ), _descriptor.MethodDescriptor( name="LookupEntry", full_name="google.cloud.datacatalog.v1beta1.DataCatalog.LookupEntry", - index=3, + index=8, containing_service=None, input_type=_LOOKUPENTRYREQUEST, output_type=_ENTRY, @@ -2233,166 +2963,166 @@ _descriptor.MethodDescriptor( name="CreateTagTemplate", full_name="google.cloud.datacatalog.v1beta1.DataCatalog.CreateTagTemplate", - index=4, + index=9, containing_service=None, input_type=_CREATETAGTEMPLATEREQUEST, output_type=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_tags__pb2._TAGTEMPLATE, serialized_options=_b( - '\202\323\344\223\002E"5/v1beta1/{parent=projects/*/locations/*}/tagTemplates:\014tag_template' + 
'\202\323\344\223\002E"5/v1beta1/{parent=projects/*/locations/*}/tagTemplates:\014tag_template\332A#parent,tag_template_id,tag_template' ), ), _descriptor.MethodDescriptor( name="GetTagTemplate", full_name="google.cloud.datacatalog.v1beta1.DataCatalog.GetTagTemplate", - index=5, + index=10, containing_service=None, input_type=_GETTAGTEMPLATEREQUEST, output_type=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_tags__pb2._TAGTEMPLATE, serialized_options=_b( - "\202\323\344\223\0027\0225/v1beta1/{name=projects/*/locations/*/tagTemplates/*}" + "\202\323\344\223\0027\0225/v1beta1/{name=projects/*/locations/*/tagTemplates/*}\332A\004name" ), ), _descriptor.MethodDescriptor( name="UpdateTagTemplate", full_name="google.cloud.datacatalog.v1beta1.DataCatalog.UpdateTagTemplate", - index=6, + index=11, containing_service=None, input_type=_UPDATETAGTEMPLATEREQUEST, output_type=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_tags__pb2._TAGTEMPLATE, serialized_options=_b( - "\202\323\344\223\002R2B/v1beta1/{tag_template.name=projects/*/locations/*/tagTemplates/*}:\014tag_template" + "\202\323\344\223\002R2B/v1beta1/{tag_template.name=projects/*/locations/*/tagTemplates/*}:\014tag_template\332A\014tag_template\332A\030tag_template,update_mask" ), ), _descriptor.MethodDescriptor( name="DeleteTagTemplate", full_name="google.cloud.datacatalog.v1beta1.DataCatalog.DeleteTagTemplate", - index=7, + index=12, containing_service=None, input_type=_DELETETAGTEMPLATEREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, serialized_options=_b( - "\202\323\344\223\0027*5/v1beta1/{name=projects/*/locations/*/tagTemplates/*}" + "\202\323\344\223\0027*5/v1beta1/{name=projects/*/locations/*/tagTemplates/*}\332A\nname,force" ), ), _descriptor.MethodDescriptor( name="CreateTagTemplateField", full_name="google.cloud.datacatalog.v1beta1.DataCatalog.CreateTagTemplateField", - index=8, + index=13, containing_service=None, input_type=_CREATETAGTEMPLATEFIELDREQUEST, 
output_type=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_tags__pb2._TAGTEMPLATEFIELD, serialized_options=_b( - '\202\323\344\223\002T">/v1beta1/{parent=projects/*/locations/*/tagTemplates/*}/fields:\022tag_template_field' + '\202\323\344\223\002T">/v1beta1/{parent=projects/*/locations/*/tagTemplates/*}/fields:\022tag_template_field\332A/parent,tag_template_field_id,tag_template_field' ), ), _descriptor.MethodDescriptor( name="UpdateTagTemplateField", full_name="google.cloud.datacatalog.v1beta1.DataCatalog.UpdateTagTemplateField", - index=9, + index=14, containing_service=None, input_type=_UPDATETAGTEMPLATEFIELDREQUEST, output_type=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_tags__pb2._TAGTEMPLATEFIELD, serialized_options=_b( - "\202\323\344\223\002T2>/v1beta1/{name=projects/*/locations/*/tagTemplates/*/fields/*}:\022tag_template_field" + "\202\323\344\223\002T2>/v1beta1/{name=projects/*/locations/*/tagTemplates/*/fields/*}:\022tag_template_field\332A\027name,tag_template_field\332A#name,tag_template_field,update_mask" ), ), _descriptor.MethodDescriptor( name="RenameTagTemplateField", full_name="google.cloud.datacatalog.v1beta1.DataCatalog.RenameTagTemplateField", - index=10, + index=15, containing_service=None, input_type=_RENAMETAGTEMPLATEFIELDREQUEST, output_type=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_tags__pb2._TAGTEMPLATEFIELD, serialized_options=_b( - '\202\323\344\223\002J"E/v1beta1/{name=projects/*/locations/*/tagTemplates/*/fields/*}:rename:\001*' + '\202\323\344\223\002J"E/v1beta1/{name=projects/*/locations/*/tagTemplates/*/fields/*}:rename:\001*\332A\036name,new_tag_template_field_id' ), ), _descriptor.MethodDescriptor( name="DeleteTagTemplateField", full_name="google.cloud.datacatalog.v1beta1.DataCatalog.DeleteTagTemplateField", - index=11, + index=16, containing_service=None, input_type=_DELETETAGTEMPLATEFIELDREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, serialized_options=_b( - 
"\202\323\344\223\002@*>/v1beta1/{name=projects/*/locations/*/tagTemplates/*/fields/*}" + "\202\323\344\223\002@*>/v1beta1/{name=projects/*/locations/*/tagTemplates/*/fields/*}\332A\nname,force" ), ), _descriptor.MethodDescriptor( name="CreateTag", full_name="google.cloud.datacatalog.v1beta1.DataCatalog.CreateTag", - index=12, + index=17, containing_service=None, input_type=_CREATETAGREQUEST, output_type=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_tags__pb2._TAG, serialized_options=_b( - '\202\323\344\223\002L"E/v1beta1/{parent=projects/*/locations/*/entryGroups/*/entries/*}/tags:\003tag' + '\202\323\344\223\002L"E/v1beta1/{parent=projects/*/locations/*/entryGroups/*/entries/*}/tags:\003tag\332A\nparent,tag' ), ), _descriptor.MethodDescriptor( name="UpdateTag", full_name="google.cloud.datacatalog.v1beta1.DataCatalog.UpdateTag", - index=13, + index=18, containing_service=None, input_type=_UPDATETAGREQUEST, output_type=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_tags__pb2._TAG, serialized_options=_b( - "\202\323\344\223\002P2I/v1beta1/{tag.name=projects/*/locations/*/entryGroups/*/entries/*/tags/*}:\003tag" + "\202\323\344\223\002P2I/v1beta1/{tag.name=projects/*/locations/*/entryGroups/*/entries/*/tags/*}:\003tag\332A\003tag\332A\017tag,update_mask" ), ), _descriptor.MethodDescriptor( name="DeleteTag", full_name="google.cloud.datacatalog.v1beta1.DataCatalog.DeleteTag", - index=14, + index=19, containing_service=None, input_type=_DELETETAGREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, serialized_options=_b( - "\202\323\344\223\002G*E/v1beta1/{name=projects/*/locations/*/entryGroups/*/entries/*/tags/*}" + "\202\323\344\223\002G*E/v1beta1/{name=projects/*/locations/*/entryGroups/*/entries/*/tags/*}\332A\004name" ), ), _descriptor.MethodDescriptor( name="ListTags", full_name="google.cloud.datacatalog.v1beta1.DataCatalog.ListTags", - index=15, + index=20, containing_service=None, input_type=_LISTTAGSREQUEST, 
output_type=_LISTTAGSRESPONSE, serialized_options=_b( - "\202\323\344\223\002G\022E/v1beta1/{parent=projects/*/locations/*/entryGroups/*/entries/*}/tags" + "\202\323\344\223\002G\022E/v1beta1/{parent=projects/*/locations/*/entryGroups/*/entries/*}/tags\332A\006parent" ), ), _descriptor.MethodDescriptor( name="SetIamPolicy", full_name="google.cloud.datacatalog.v1beta1.DataCatalog.SetIamPolicy", - index=16, + index=21, containing_service=None, input_type=google_dot_iam_dot_v1_dot_iam__policy__pb2._SETIAMPOLICYREQUEST, output_type=google_dot_iam_dot_v1_dot_policy__pb2._POLICY, serialized_options=_b( - '\202\323\344\223\002K"F/v1beta1/{resource=projects/*/locations/*/tagTemplates/*}:setIamPolicy:\001*' + '\202\323\344\223\002\355\001"F/v1beta1/{resource=projects/*/locations/*/tagTemplates/*}:setIamPolicy:\001*ZJ"E/v1beta1/{resource=projects/*/locations/*/entryGroups/*}:setIamPolicy:\001*ZT"O/v1beta1/{resource=projects/*/locations/*/entryGroups/*/entries/*}:setIamPolicy:\001*' ), ), _descriptor.MethodDescriptor( name="GetIamPolicy", full_name="google.cloud.datacatalog.v1beta1.DataCatalog.GetIamPolicy", - index=17, + index=22, containing_service=None, input_type=google_dot_iam_dot_v1_dot_iam__policy__pb2._GETIAMPOLICYREQUEST, output_type=google_dot_iam_dot_v1_dot_policy__pb2._POLICY, serialized_options=_b( - '\202\323\344\223\002K"F/v1beta1/{resource=projects/*/locations/*/tagTemplates/*}:getIamPolicy:\001*' + '\202\323\344\223\002\355\001"F/v1beta1/{resource=projects/*/locations/*/tagTemplates/*}:getIamPolicy:\001*ZJ"E/v1beta1/{resource=projects/*/locations/*/entryGroups/*}:getIamPolicy:\001*ZT"O/v1beta1/{resource=projects/*/locations/*/entryGroups/*/entries/*}:getIamPolicy:\001*' ), ), _descriptor.MethodDescriptor( name="TestIamPermissions", full_name="google.cloud.datacatalog.v1beta1.DataCatalog.TestIamPermissions", - index=18, + index=23, containing_service=None, input_type=google_dot_iam_dot_v1_dot_iam__policy__pb2._TESTIAMPERMISSIONSREQUEST, 
output_type=google_dot_iam_dot_v1_dot_iam__policy__pb2._TESTIAMPERMISSIONSRESPONSE, serialized_options=_b( - '\202\323\344\223\002Q"L/v1beta1/{resource=projects/*/locations/*/tagTemplates/*}:testIamPermissions:\001*' + '\202\323\344\223\002\377\001"L/v1beta1/{resource=projects/*/locations/*/tagTemplates/*}:testIamPermissions:\001*ZP"K/v1beta1/{resource=projects/*/locations/*/entryGroups/*}:testIamPermissions:\001*ZZ"U/v1beta1/{resource=projects/*/locations/*/entryGroups/*/entries/*}:testIamPermissions:\001*' ), ), ], diff --git a/datacatalog/google/cloud/datacatalog_v1beta1/proto/datacatalog_pb2_grpc.py b/datacatalog/google/cloud/datacatalog_v1beta1/proto/datacatalog_pb2_grpc.py index c9af06b41c0c..3364f7f20fb4 100644 --- a/datacatalog/google/cloud/datacatalog_v1beta1/proto/datacatalog_pb2_grpc.py +++ b/datacatalog/google/cloud/datacatalog_v1beta1/proto/datacatalog_pb2_grpc.py @@ -28,11 +28,36 @@ def __init__(self, channel): request_serializer=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_datacatalog__pb2.SearchCatalogRequest.SerializeToString, response_deserializer=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_datacatalog__pb2.SearchCatalogResponse.FromString, ) + self.CreateEntryGroup = channel.unary_unary( + "/google.cloud.datacatalog.v1beta1.DataCatalog/CreateEntryGroup", + request_serializer=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_datacatalog__pb2.CreateEntryGroupRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_datacatalog__pb2.EntryGroup.FromString, + ) + self.GetEntryGroup = channel.unary_unary( + "/google.cloud.datacatalog.v1beta1.DataCatalog/GetEntryGroup", + request_serializer=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_datacatalog__pb2.GetEntryGroupRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_datacatalog__pb2.EntryGroup.FromString, + ) + self.DeleteEntryGroup = channel.unary_unary( + 
"/google.cloud.datacatalog.v1beta1.DataCatalog/DeleteEntryGroup", + request_serializer=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_datacatalog__pb2.DeleteEntryGroupRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.CreateEntry = channel.unary_unary( + "/google.cloud.datacatalog.v1beta1.DataCatalog/CreateEntry", + request_serializer=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_datacatalog__pb2.CreateEntryRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_datacatalog__pb2.Entry.FromString, + ) self.UpdateEntry = channel.unary_unary( "/google.cloud.datacatalog.v1beta1.DataCatalog/UpdateEntry", request_serializer=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_datacatalog__pb2.UpdateEntryRequest.SerializeToString, response_deserializer=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_datacatalog__pb2.Entry.FromString, ) + self.DeleteEntry = channel.unary_unary( + "/google.cloud.datacatalog.v1beta1.DataCatalog/DeleteEntry", + request_serializer=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_datacatalog__pb2.DeleteEntryRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) self.GetEntry = channel.unary_unary( "/google.cloud.datacatalog.v1beta1.DataCatalog/GetEntry", request_serializer=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_datacatalog__pb2.GetEntryRequest.SerializeToString, @@ -132,7 +157,7 @@ def SearchCatalog(self, request, context): This is a custom method (https://cloud.google.com/apis/design/custom_methods) and does not return the complete resource, only the resource identifier and high level - fields. Clients can subsequentally call Get methods. + fields. Clients can subsequentally call `Get` methods. Note that searches do not have full recall. 
There may be results that match your query but are not returned, even in subsequent pages of results. These @@ -146,8 +171,65 @@ def SearchCatalog(self, request, context): context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") + def CreateEntryGroup(self, request, context): + """Alpha feature. + Creates an EntryGroup. + The user should enable the Data Catalog API in the project identified by + the `parent` parameter (see [Data Catalog Resource Project] + (/data-catalog/docs/concepts/resource-project) for more information). + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def GetEntryGroup(self, request, context): + """Alpha feature. + Gets an EntryGroup. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def DeleteEntryGroup(self, request, context): + """Alpha feature. + Deletes an EntryGroup. Only entry groups that do not contain entries can be + deleted. The user should enable the Data Catalog API in the project + identified by the `name` parameter (see [Data Catalog Resource Project] + (/data-catalog/docs/concepts/resource-project) for more information). + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def CreateEntry(self, request, context): + """Alpha feature. + Creates an entry. Currently only entries of 'FILESET' type can be created. + The user should enable the Data Catalog API in the project identified by + the `parent` parameter (see [Data Catalog Resource Project] + (/data-catalog/docs/concepts/resource-project) for more information). 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + def UpdateEntry(self, request, context): """Updates an existing entry. + The user should enable the Data Catalog API in the project identified by + the `entry.name` parameter (see [Data Catalog Resource Project] + (/data-catalog/docs/concepts/resource-project) for more information). + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def DeleteEntry(self, request, context): + """Alpha feature. + Deletes an existing entry. Only entries created through + [CreateEntry][google.cloud.datacatalog.v1beta1.DataCatalog.CreateEntry] + method can be deleted. + The user should enable the Data Catalog API in the project identified by + the `name` parameter (see [Data Catalog Resource Project] + (/data-catalog/docs/concepts/resource-project) for more information). """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") @@ -170,7 +252,10 @@ def LookupEntry(self, request, context): raise NotImplementedError("Method not implemented!") def CreateTagTemplate(self, request, context): - """Creates a tag template. + """Creates a tag template. The user should enable the Data Catalog API in + the project identified by the `parent` parameter (see [Data Catalog + Resource Project](/data-catalog/docs/concepts/resource-project) for more + information). """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") @@ -187,6 +272,9 @@ def UpdateTagTemplate(self, request, context): """Updates a tag template. This method cannot be used to update the fields of a template. The tag template fields are represented as separate resources and should be updated using their own create/update/delete methods. 
+ The user should enable the Data Catalog API in the project identified by + the `tag_template.name` parameter (see [Data Catalog Resource Project] + (/data-catalog/docs/concepts/resource-project) for more information). """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") @@ -194,13 +282,20 @@ def UpdateTagTemplate(self, request, context): def DeleteTagTemplate(self, request, context): """Deletes a tag template and all tags using the template. + The user should enable the Data Catalog API in the project identified by + the `name` parameter (see [Data Catalog Resource Project] + (/data-catalog/docs/concepts/resource-project) for more information). """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def CreateTagTemplateField(self, request, context): - """Creates a field in a tag template. + """Creates a field in a tag template. The user should enable the Data Catalog + API in the project identified by the `parent` parameter (see + [Data Catalog Resource + Project](/data-catalog/docs/concepts/resource-project) for more + information). """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") @@ -208,14 +303,19 @@ def CreateTagTemplateField(self, request, context): def UpdateTagTemplateField(self, request, context): """Updates a field in a tag template. This method cannot be used to update the - field type. + field type. The user should enable the Data Catalog API in the project + identified by the `name` parameter (see [Data Catalog Resource Project] + (/data-catalog/docs/concepts/resource-project) for more information). """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def RenameTagTemplateField(self, request, context): - """Renames a field in a tag template. 
+ """Renames a field in a tag template. The user should enable the Data Catalog + API in the project identified by the `name` parameter (see [Data Catalog + Resource Project](/data-catalog/docs/concepts/resource-project) for more + information). """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") @@ -223,6 +323,9 @@ def RenameTagTemplateField(self, request, context): def DeleteTagTemplateField(self, request, context): """Deletes a field in a tag template and all uses of that field. + The user should enable the Data Catalog API in the project identified by + the `name` parameter (see [Data Catalog Resource Project] + (/data-catalog/docs/concepts/resource-project) for more information). """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") @@ -230,6 +333,12 @@ def DeleteTagTemplateField(self, request, context): def CreateTag(self, request, context): """Creates a tag on an [Entry][google.cloud.datacatalog.v1beta1.Entry]. + Note: The project identified by the `parent` parameter for the + [tag](/data-catalog/docs/reference/rest/v1beta1/projects.locations.entryGroups.entries.tags/create#path-parameters) + and the + [tag + template](/data-catalog/docs/reference/rest/v1beta1/projects.locations.tagTemplates/create#path-parameters) + used to create the tag must be from the same organization. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") @@ -261,12 +370,17 @@ def SetIamPolicy(self, request, context): policy. Supported resources are: - Tag templates. + - Entries. + - Entry groups. Note, this method cannot be used to manage policies for BigQuery, Cloud Pub/Sub and any external Google Cloud Platform resources synced to Cloud Data Catalog. Callers must have following Google IAM permission - `datacatalog.tagTemplates.setIamPolicy` to set policies on tag templates. 
+ - `datacatalog.tagTemplates.setIamPolicy` to set policies on tag + templates. + - `datacatalog.entries.setIamPolicy` to set policies on entries. + - `datacatalog.entryGroups.setIamPolicy` to set policies on entry groups. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") @@ -279,12 +393,17 @@ def GetIamPolicy(self, request, context): Supported resources are: - Tag templates. + - Entries. + - Entry groups. Note, this method cannot be used to manage policies for BigQuery, Cloud Pub/Sub and any external Google Cloud Platform resources synced to Cloud Data Catalog. Callers must have following Google IAM permission - `datacatalog.tagTemplates.getIamPolicy` to get policies on tag templates. + - `datacatalog.tagTemplates.getIamPolicy` to get policies on tag + templates. + - `datacatalog.entries.getIamPolicy` to get policies on entries. + - `datacatalog.entryGroups.getIamPolicy` to get policies on entry groups. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") @@ -295,8 +414,10 @@ def TestIamPermissions(self, request, context): If the resource does not exist, an empty set of permissions is returned (We don't return a `NOT_FOUND` error). - Supported resource are: - - tag templates. + Supported resources are: + - Tag templates. + - Entries. + - Entry groups. Note, this method cannot be used to manage policies for BigQuery, Cloud Pub/Sub and any external Google Cloud Platform resources synced to Cloud Data Catalog. 
@@ -316,11 +437,36 @@ def add_DataCatalogServicer_to_server(servicer, server): request_deserializer=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_datacatalog__pb2.SearchCatalogRequest.FromString, response_serializer=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_datacatalog__pb2.SearchCatalogResponse.SerializeToString, ), + "CreateEntryGroup": grpc.unary_unary_rpc_method_handler( + servicer.CreateEntryGroup, + request_deserializer=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_datacatalog__pb2.CreateEntryGroupRequest.FromString, + response_serializer=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_datacatalog__pb2.EntryGroup.SerializeToString, + ), + "GetEntryGroup": grpc.unary_unary_rpc_method_handler( + servicer.GetEntryGroup, + request_deserializer=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_datacatalog__pb2.GetEntryGroupRequest.FromString, + response_serializer=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_datacatalog__pb2.EntryGroup.SerializeToString, + ), + "DeleteEntryGroup": grpc.unary_unary_rpc_method_handler( + servicer.DeleteEntryGroup, + request_deserializer=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_datacatalog__pb2.DeleteEntryGroupRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + "CreateEntry": grpc.unary_unary_rpc_method_handler( + servicer.CreateEntry, + request_deserializer=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_datacatalog__pb2.CreateEntryRequest.FromString, + response_serializer=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_datacatalog__pb2.Entry.SerializeToString, + ), "UpdateEntry": grpc.unary_unary_rpc_method_handler( servicer.UpdateEntry, request_deserializer=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_datacatalog__pb2.UpdateEntryRequest.FromString, response_serializer=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_datacatalog__pb2.Entry.SerializeToString, ), + 
"DeleteEntry": grpc.unary_unary_rpc_method_handler( + servicer.DeleteEntry, + request_deserializer=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_datacatalog__pb2.DeleteEntryRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), "GetEntry": grpc.unary_unary_rpc_method_handler( servicer.GetEntry, request_deserializer=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_datacatalog__pb2.GetEntryRequest.FromString, diff --git a/datacatalog/google/cloud/datacatalog_v1beta1/proto/gcs_fileset_spec.proto b/datacatalog/google/cloud/datacatalog_v1beta1/proto/gcs_fileset_spec.proto new file mode 100644 index 000000000000..e7397d054365 --- /dev/null +++ b/datacatalog/google/cloud/datacatalog_v1beta1/proto/gcs_fileset_spec.proto @@ -0,0 +1,59 @@ +// Copyright 2019 Google LLC. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +syntax = "proto3"; + +package google.cloud.datacatalog.v1beta1; + +import "google/api/field_behavior.proto"; +import "google/cloud/datacatalog/v1beta1/timestamps.proto"; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1;datacatalog"; +option java_multiple_files = true; +option java_package = "com.google.cloud.datacatalog"; + +// Describes a Cloud Storage fileset entry. +message GcsFilesetSpec { + // Required. Patterns to identify a set of files in Google Cloud Storage. 
+ // + // Examples of valid file_patterns: + // + // * `gs://bucket_name/*`: matches all files in `bucket_name` + // * `gs://bucket_name/file*`: matches files prefixed by `file` in + // `bucket_name` + // * `gs://bucket_name/a/*/b`: matches all files in `bucket_name` that match + // `a/*/b` pattern, such as `a/c/b`, `a/d/b` + // * `gs://another_bucket/a.txt`: matches `gs://another_bucket/a.txt` + repeated string file_patterns = 1 [(google.api.field_behavior) = REQUIRED]; + + // Output only. Sample files contained in this fileset, not all files + // contained in this fileset are represented here. + repeated GcsFileSpec sample_gcs_file_specs = 2 + [(google.api.field_behavior) = OUTPUT_ONLY]; +} + +// Specifications of a single file in GCS. +message GcsFileSpec { + // Required. The full file path. Example: `gs://bucket_name/a/b.txt`. + string file_path = 1 [(google.api.field_behavior) = REQUIRED]; + + // Output only. Timestamps about the GCS file. + SystemTimestamps gcs_timestamps = 2 + [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. The size of the file, in bytes. + int64 size_bytes = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; +} diff --git a/datacatalog/google/cloud/datacatalog_v1beta1/proto/gcs_fileset_spec_pb2.py b/datacatalog/google/cloud/datacatalog_v1beta1/proto/gcs_fileset_spec_pb2.py new file mode 100644 index 000000000000..94aee77c5677 --- /dev/null +++ b/datacatalog/google/cloud/datacatalog_v1beta1/proto/gcs_fileset_spec_pb2.py @@ -0,0 +1,240 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/cloud/datacatalog_v1beta1/proto/gcs_fileset_spec.proto + +import sys + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.cloud.datacatalog_v1beta1.proto import ( + timestamps_pb2 as google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_timestamps__pb2, +) + + +DESCRIPTOR = _descriptor.FileDescriptor( + name="google/cloud/datacatalog_v1beta1/proto/gcs_fileset_spec.proto", + package="google.cloud.datacatalog.v1beta1", + syntax="proto3", + serialized_options=_b( + "\n\034com.google.cloud.datacatalogP\001ZKgoogle.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1;datacatalog\370\001\001" + ), + serialized_pb=_b( + '\n=google/cloud/datacatalog_v1beta1/proto/gcs_fileset_spec.proto\x12 google.cloud.datacatalog.v1beta1\x1a\x1fgoogle/api/field_behavior.proto\x1a\x37google/cloud/datacatalog_v1beta1/proto/timestamps.proto"\x7f\n\x0eGcsFilesetSpec\x12\x1a\n\rfile_patterns\x18\x01 \x03(\tB\x03\xe0\x41\x02\x12Q\n\x15sample_gcs_file_specs\x18\x02 \x03(\x0b\x32-.google.cloud.datacatalog.v1beta1.GcsFileSpecB\x03\xe0\x41\x03"\x8f\x01\n\x0bGcsFileSpec\x12\x16\n\tfile_path\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12O\n\x0egcs_timestamps\x18\x02 \x01(\x0b\x32\x32.google.cloud.datacatalog.v1beta1.SystemTimestampsB\x03\xe0\x41\x03\x12\x17\n\nsize_bytes\x18\x04 \x01(\x03\x42\x03\xe0\x41\x03\x42p\n\x1c\x63om.google.cloud.datacatalogP\x01ZKgoogle.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1;datacatalog\xf8\x01\x01\x62\x06proto3' + ), + dependencies=[ + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, + 
google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_timestamps__pb2.DESCRIPTOR, + ], +) + + +_GCSFILESETSPEC = _descriptor.Descriptor( + name="GcsFilesetSpec", + full_name="google.cloud.datacatalog.v1beta1.GcsFilesetSpec", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="file_patterns", + full_name="google.cloud.datacatalog.v1beta1.GcsFilesetSpec.file_patterns", + index=0, + number=1, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="sample_gcs_file_specs", + full_name="google.cloud.datacatalog.v1beta1.GcsFilesetSpec.sample_gcs_file_specs", + index=1, + number=2, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\003"), + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=189, + serialized_end=316, +) + + +_GCSFILESPEC = _descriptor.Descriptor( + name="GcsFileSpec", + full_name="google.cloud.datacatalog.v1beta1.GcsFileSpec", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="file_path", + full_name="google.cloud.datacatalog.v1beta1.GcsFileSpec.file_path", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + 
name="gcs_timestamps", + full_name="google.cloud.datacatalog.v1beta1.GcsFileSpec.gcs_timestamps", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\003"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="size_bytes", + full_name="google.cloud.datacatalog.v1beta1.GcsFileSpec.size_bytes", + index=2, + number=4, + type=3, + cpp_type=2, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\003"), + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=319, + serialized_end=462, +) + +_GCSFILESETSPEC.fields_by_name["sample_gcs_file_specs"].message_type = _GCSFILESPEC +_GCSFILESPEC.fields_by_name[ + "gcs_timestamps" +].message_type = ( + google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_timestamps__pb2._SYSTEMTIMESTAMPS +) +DESCRIPTOR.message_types_by_name["GcsFilesetSpec"] = _GCSFILESETSPEC +DESCRIPTOR.message_types_by_name["GcsFileSpec"] = _GCSFILESPEC +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +GcsFilesetSpec = _reflection.GeneratedProtocolMessageType( + "GcsFilesetSpec", + (_message.Message,), + dict( + DESCRIPTOR=_GCSFILESETSPEC, + __module__="google.cloud.datacatalog_v1beta1.proto.gcs_fileset_spec_pb2", + __doc__="""Describes a Cloud Storage fileset entry. + + + Attributes: + file_patterns: + Required. Patterns to identify a set of files in Google Cloud + Storage. 
Examples of valid file\_patterns: - + ``gs://bucket_name/*``: matches all files in ``bucket_name`` - + ``gs://bucket_name/file*``: matches files prefixed by ``file`` + in ``bucket_name`` - ``gs://bucket_name/a/*/b``: matches + all files in ``bucket_name`` that match ``a/*/b`` pattern, + such as ``a/c/b``, ``a/d/b`` - ``gs://another_bucket/a.txt``: + matches ``gs://another_bucket/a.txt`` + sample_gcs_file_specs: + Output only. Sample files contained in this fileset, not all + files contained in this fileset are represented here. + """, + # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.GcsFilesetSpec) + ), +) +_sym_db.RegisterMessage(GcsFilesetSpec) + +GcsFileSpec = _reflection.GeneratedProtocolMessageType( + "GcsFileSpec", + (_message.Message,), + dict( + DESCRIPTOR=_GCSFILESPEC, + __module__="google.cloud.datacatalog_v1beta1.proto.gcs_fileset_spec_pb2", + __doc__="""Specifications of a single file in GCS. + + + Attributes: + file_path: + Required. The full file path. Example: + ``gs://bucket_name/a/b.txt``. + gcs_timestamps: + Output only. Timestamps about the GCS file. + size_bytes: + Output only. The size of the file, in bytes. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.GcsFileSpec) + ), +) +_sym_db.RegisterMessage(GcsFileSpec) + + +DESCRIPTOR._options = None +_GCSFILESETSPEC.fields_by_name["file_patterns"]._options = None +_GCSFILESETSPEC.fields_by_name["sample_gcs_file_specs"]._options = None +_GCSFILESPEC.fields_by_name["file_path"]._options = None +_GCSFILESPEC.fields_by_name["gcs_timestamps"]._options = None +_GCSFILESPEC.fields_by_name["size_bytes"]._options = None +# @@protoc_insertion_point(module_scope) diff --git a/datacatalog/google/cloud/datacatalog_v1beta1/proto/gcs_fileset_spec_pb2_grpc.py b/datacatalog/google/cloud/datacatalog_v1beta1/proto/gcs_fileset_spec_pb2_grpc.py new file mode 100644 index 000000000000..07cb78fe03a9 --- /dev/null +++ b/datacatalog/google/cloud/datacatalog_v1beta1/proto/gcs_fileset_spec_pb2_grpc.py @@ -0,0 +1,2 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +import grpc diff --git a/datacatalog/google/cloud/datacatalog_v1beta1/proto/schema.proto b/datacatalog/google/cloud/datacatalog_v1beta1/proto/schema.proto index 839ef1d8ca50..aca588b4503b 100644 --- a/datacatalog/google/cloud/datacatalog_v1beta1/proto/schema.proto +++ b/datacatalog/google/cloud/datacatalog_v1beta1/proto/schema.proto @@ -17,6 +17,8 @@ syntax = "proto3"; package google.cloud.datacatalog.v1beta1; +import "google/api/field_behavior.proto"; + option cc_enable_arenas = true; option go_package = "google.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1;datacatalog"; option java_multiple_files = true; @@ -26,27 +28,27 @@ option java_package = "com.google.cloud.datacatalog"; message Schema { // Required. Schema of columns. A maximum of 10,000 columns and sub-columns // can be specified. - repeated ColumnSchema columns = 2; + repeated ColumnSchema columns = 2 [(google.api.field_behavior) = REQUIRED]; } // Representation of a column within a schema. Columns could be nested inside // other columns. 
message ColumnSchema { // Required. Name of the column. - string column = 6; + string column = 6 [(google.api.field_behavior) = REQUIRED]; // Required. Type of the column. - string type = 1; + string type = 1 [(google.api.field_behavior) = REQUIRED]; // Optional. Description of the column. Default value is an empty string. - string description = 2; + string description = 2 [(google.api.field_behavior) = OPTIONAL]; // Optional. A column's mode indicates whether the values in this column are - // required, nullable, etc. Only 'NULLABLE', 'REQUIRED' and 'REPEATED' are - // supported. Default mode is 'NULLABLE'. - string mode = 3; + // required, nullable, etc. Only `NULLABLE`, `REQUIRED` and `REPEATED` are + // supported. Default mode is `NULLABLE`. + string mode = 3 [(google.api.field_behavior) = OPTIONAL]; // Optional. Schema of sub-columns. A column can have zero or more // sub-columns. - repeated ColumnSchema subcolumns = 7; + repeated ColumnSchema subcolumns = 7 [(google.api.field_behavior) = OPTIONAL]; } diff --git a/datacatalog/google/cloud/datacatalog_v1beta1/proto/schema_pb2.py b/datacatalog/google/cloud/datacatalog_v1beta1/proto/schema_pb2.py index e7643ae11df4..b5ac8e07cc8c 100644 --- a/datacatalog/google/cloud/datacatalog_v1beta1/proto/schema_pb2.py +++ b/datacatalog/google/cloud/datacatalog_v1beta1/proto/schema_pb2.py @@ -15,6 +15,9 @@ _sym_db = _symbol_database.Default() +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 + + DESCRIPTOR = _descriptor.FileDescriptor( name="google/cloud/datacatalog_v1beta1/proto/schema.proto", package="google.cloud.datacatalog.v1beta1", @@ -23,8 +26,9 @@ "\n\034com.google.cloud.datacatalogP\001ZKgoogle.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1;datacatalog\370\001\001" ), serialized_pb=_b( - '\n3google/cloud/datacatalog_v1beta1/proto/schema.proto\x12 google.cloud.datacatalog.v1beta1"I\n\x06Schema\x12?\n\x07\x63olumns\x18\x02 
\x03(\x0b\x32..google.cloud.datacatalog.v1beta1.ColumnSchema"\x93\x01\n\x0c\x43olumnSchema\x12\x0e\n\x06\x63olumn\x18\x06 \x01(\t\x12\x0c\n\x04type\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0c\n\x04mode\x18\x03 \x01(\t\x12\x42\n\nsubcolumns\x18\x07 \x03(\x0b\x32..google.cloud.datacatalog.v1beta1.ColumnSchemaBp\n\x1c\x63om.google.cloud.datacatalogP\x01ZKgoogle.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1;datacatalog\xf8\x01\x01\x62\x06proto3' + '\n3google/cloud/datacatalog_v1beta1/proto/schema.proto\x12 google.cloud.datacatalog.v1beta1\x1a\x1fgoogle/api/field_behavior.proto"N\n\x06Schema\x12\x44\n\x07\x63olumns\x18\x02 \x03(\x0b\x32..google.cloud.datacatalog.v1beta1.ColumnSchemaB\x03\xe0\x41\x02"\xac\x01\n\x0c\x43olumnSchema\x12\x13\n\x06\x63olumn\x18\x06 \x01(\tB\x03\xe0\x41\x02\x12\x11\n\x04type\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x18\n\x0b\x64\x65scription\x18\x02 \x01(\tB\x03\xe0\x41\x01\x12\x11\n\x04mode\x18\x03 \x01(\tB\x03\xe0\x41\x01\x12G\n\nsubcolumns\x18\x07 \x03(\x0b\x32..google.cloud.datacatalog.v1beta1.ColumnSchemaB\x03\xe0\x41\x01\x42p\n\x1c\x63om.google.cloud.datacatalogP\x01ZKgoogle.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1;datacatalog\xf8\x01\x01\x62\x06proto3' ), + dependencies=[google_dot_api_dot_field__behavior__pb2.DESCRIPTOR], ) @@ -50,7 +54,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ) ], @@ -62,8 +66,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=89, - serialized_end=162, + serialized_start=122, + serialized_end=200, ) @@ -89,7 +93,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -107,7 +111,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), 
file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -125,7 +129,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -143,7 +147,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -161,7 +165,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -173,8 +177,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=165, - serialized_end=312, + serialized_start=203, + serialized_end=375, ) _SCHEMA.fields_by_name["columns"].message_type = _COLUMNSCHEMA @@ -222,9 +226,9 @@ string. mode: Optional. A column's mode indicates whether the values in this - column are required, nullable, etc. Only 'NULLABLE', - 'REQUIRED' and 'REPEATED' are supported. Default mode is - 'NULLABLE'. + column are required, nullable, etc. Only ``NULLABLE``, + ``REQUIRED`` and ``REPEATED`` are supported. Default mode is + ``NULLABLE``. subcolumns: Optional. Schema of sub-columns. A column can have zero or more sub-columns. 
@@ -236,4 +240,10 @@ DESCRIPTOR._options = None +_SCHEMA.fields_by_name["columns"]._options = None +_COLUMNSCHEMA.fields_by_name["column"]._options = None +_COLUMNSCHEMA.fields_by_name["type"]._options = None +_COLUMNSCHEMA.fields_by_name["description"]._options = None +_COLUMNSCHEMA.fields_by_name["mode"]._options = None +_COLUMNSCHEMA.fields_by_name["subcolumns"]._options = None # @@protoc_insertion_point(module_scope) diff --git a/datacatalog/google/cloud/datacatalog_v1beta1/proto/search.proto b/datacatalog/google/cloud/datacatalog_v1beta1/proto/search.proto index 2a31dd94543a..372c1573c3db 100644 --- a/datacatalog/google/cloud/datacatalog_v1beta1/proto/search.proto +++ b/datacatalog/google/cloud/datacatalog_v1beta1/proto/search.proto @@ -17,7 +17,7 @@ syntax = "proto3"; package google.cloud.datacatalog.v1beta1; -import "google/api/annotations.proto"; +import "google/api/field_behavior.proto"; import "google/protobuf/timestamp.proto"; option cc_enable_arenas = true; @@ -34,20 +34,22 @@ message SearchCatalogResult { // Sub-type of the search result. This is a dot-delimited description of the // resource's full type, and is the same as the value callers would provide in - // the "type" search facet. Examples: "entry.table", "entry.dataStream", - // "tagTemplate" + // the "type" search facet. Examples: `entry.table`, `entry.dataStream`, + // `tagTemplate`. string search_result_subtype = 2; // The relative resource name of the resource in URL format. // Examples: - // "projects/{project_id}/locations/{location_id}/entryGroups/{entry_group_id}/entries/{entry_id}". - // "projects/{project_id}/tagTemplates/{tag_template_id}". + // + // * `projects/{project_id}/locations/{location_id}/entryGroups/{entry_group_id}/entries/{entry_id}` + // * `projects/{project_id}/tagTemplates/{tag_template_id}` string relative_resource_name = 3; // The full name of the cloud resource the entry belongs to. 
See: - // https://cloud.google.com/apis/design/resource_names#full_resource_name + // https://cloud.google.com/apis/design/resource_names#full_resource_name. // Example: - // "//bigquery.googleapis.com/projects/projectId/datasets/datasetId/tables/tableId". + // + // * `//bigquery.googleapis.com/projects/projectId/datasets/datasetId/tables/tableId` string linked_resource = 4; } diff --git a/datacatalog/google/cloud/datacatalog_v1beta1/proto/search_pb2.py b/datacatalog/google/cloud/datacatalog_v1beta1/proto/search_pb2.py index 5c7ea4466f44..480288089982 100644 --- a/datacatalog/google/cloud/datacatalog_v1beta1/proto/search_pb2.py +++ b/datacatalog/google/cloud/datacatalog_v1beta1/proto/search_pb2.py @@ -16,7 +16,7 @@ _sym_db = _symbol_database.Default() -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 @@ -28,10 +28,10 @@ "\n\034com.google.cloud.datacatalogP\001ZKgoogle.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1;datacatalog\370\001\001" ), serialized_pb=_b( - '\n3google/cloud/datacatalog_v1beta1/proto/search.proto\x12 google.cloud.datacatalog.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xbd\x01\n\x13SearchCatalogResult\x12N\n\x12search_result_type\x18\x01 \x01(\x0e\x32\x32.google.cloud.datacatalog.v1beta1.SearchResultType\x12\x1d\n\x15search_result_subtype\x18\x02 \x01(\t\x12\x1e\n\x16relative_resource_name\x18\x03 \x01(\t\x12\x17\n\x0flinked_resource\x18\x04 \x01(\t*d\n\x10SearchResultType\x12"\n\x1eSEARCH_RESULT_TYPE_UNSPECIFIED\x10\x00\x12\t\n\x05\x45NTRY\x10\x01\x12\x10\n\x0cTAG_TEMPLATE\x10\x02\x12\x0f\n\x0b\x45NTRY_GROUP\x10\x03\x42p\n\x1c\x63om.google.cloud.datacatalogP\x01ZKgoogle.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1;datacatalog\xf8\x01\x01\x62\x06proto3' + 
'\n3google/cloud/datacatalog_v1beta1/proto/search.proto\x12 google.cloud.datacatalog.v1beta1\x1a\x1fgoogle/api/field_behavior.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xbd\x01\n\x13SearchCatalogResult\x12N\n\x12search_result_type\x18\x01 \x01(\x0e\x32\x32.google.cloud.datacatalog.v1beta1.SearchResultType\x12\x1d\n\x15search_result_subtype\x18\x02 \x01(\t\x12\x1e\n\x16relative_resource_name\x18\x03 \x01(\t\x12\x17\n\x0flinked_resource\x18\x04 \x01(\t*d\n\x10SearchResultType\x12"\n\x1eSEARCH_RESULT_TYPE_UNSPECIFIED\x10\x00\x12\t\n\x05\x45NTRY\x10\x01\x12\x10\n\x0cTAG_TEMPLATE\x10\x02\x12\x0f\n\x0b\x45NTRY_GROUP\x10\x03\x42p\n\x1c\x63om.google.cloud.datacatalogP\x01ZKgoogle.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1;datacatalog\xf8\x01\x01\x62\x06proto3' ), dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, ], ) @@ -61,8 +61,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=344, - serialized_end=444, + serialized_start=347, + serialized_end=447, ) _sym_db.RegisterEnumDescriptor(_SEARCHRESULTTYPE) @@ -161,8 +161,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=153, - serialized_end=342, + serialized_start=156, + serialized_end=345, ) _SEARCHCATALOGRESULT.fields_by_name["search_result_type"].enum_type = _SEARCHRESULTTYPE @@ -188,17 +188,18 @@ Sub-type of the search result. This is a dot-delimited description of the resource's full type, and is the same as the value callers would provide in the "type" search facet. - Examples: "entry.table", "entry.dataStream", "tagTemplate" + Examples: ``entry.table``, ``entry.dataStream``, + ``tagTemplate``. relative_resource_name: The relative resource name of the resource in URL format. - Examples: "projects/{project\_id}/locations/{location\_id}/ent - ryGroups/{entry\_group\_id}/entries/{entry\_id}". 
- "projects/{project\_id}/tagTemplates/{tag\_template\_id}". + Examples: - ``projects/{project_id}/locations/{location_id}/ + entryGroups/{entry_group_id}/entries/{entry_id}`` - + ``projects/{project_id}/tagTemplates/{tag_template_id}`` linked_resource: The full name of the cloud resource the entry belongs to. See: https://cloud.google.com/apis/design/resource\_names#full\_res - ource\_name Example: "//bigquery.googleapis.com/projects/proje - ctId/datasets/datasetId/tables/tableId". + ource\_name. Example: - ``//bigquery.googleapis.com/projects + /projectId/datasets/datasetId/tables/tableId`` """, # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.SearchCatalogResult) ), diff --git a/datacatalog/google/cloud/datacatalog_v1beta1/proto/table_spec.proto b/datacatalog/google/cloud/datacatalog_v1beta1/proto/table_spec.proto index 8e9547fea783..4f9fddaaf97b 100644 --- a/datacatalog/google/cloud/datacatalog_v1beta1/proto/table_spec.proto +++ b/datacatalog/google/cloud/datacatalog_v1beta1/proto/table_spec.proto @@ -17,6 +17,9 @@ syntax = "proto3"; package google.cloud.datacatalog.v1beta1; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; + option cc_enable_arenas = true; option go_package = "google.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1;datacatalog"; option java_multiple_files = true; @@ -25,16 +28,17 @@ option java_package = "com.google.cloud.datacatalog"; // Describes a BigQuery table. message BigQueryTableSpec { // Output only. The table source type. - TableSourceType table_source_type = 1; + TableSourceType table_source_type = 1 + [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. oneof type_spec { // Table view specification. This field should only be populated if - // table_source_type is BIGQUERY_VIEW. + // `table_source_type` is `BIGQUERY_VIEW`. ViewSpec view_spec = 2; // Spec of a BigQuery table. This field should only be populated if - // table_source_type is BIGQUERY_TABLE. 
+ // `table_source_type` is `BIGQUERY_TABLE`. TableSpec table_spec = 3; } } @@ -54,35 +58,44 @@ enum TableSourceType { // Table view specification. message ViewSpec { // Output only. The query that defines the table view. - string view_query = 1; + string view_query = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; } // Normal BigQuery table spec. message TableSpec { - // Output only. If the table is a dated shard, i.e. with name pattern - // [prefix]YYYYMMDD, grouped_entry is the Data Catalog resource name of the - // date sharded grouped entry, e.g. - // projects/{project_id}/locations/{location}/entrygroups/{entry_group_id} - // /entries/{entry_id}. - // Otherwise, grouped_entry will be empty. - string grouped_entry = 1; + // Output only. If the table is a dated shard, i.e., with name pattern + // `[prefix]YYYYMMDD`, `grouped_entry` is the Data Catalog resource name of + // the date sharded grouped entry, for example, + // `projects/{project_id}/locations/{location}/entrygroups/{entry_group_id}/entries/{entry_id}`. + // Otherwise, `grouped_entry` is empty. + string grouped_entry = 1 [ + (google.api.field_behavior) = OUTPUT_ONLY, + (google.api.resource_reference) = { + type: "datacatalog.googleapis.com/Entry" + } + ]; } -// Spec for a group of BigQuery tables with name pattern [prefix]YYYYMMDD. +// Spec for a group of BigQuery tables with name pattern `[prefix]YYYYMMDD`. // Context: // https://cloud.google.com/bigquery/docs/partitioned-tables#partitioning_versus_sharding message BigQueryDateShardedSpec { // Output only. The Data Catalog resource name of the dataset entry the - // current table belongs to, e.g. - // projects/{project_id}/locations/{location}/entrygroups/{entry_group_id} - // /entries/{entry_id} - string dataset = 1; + // current table belongs to, for example, + // `projects/{project_id}/locations/{location}/entrygroups/{entry_group_id}/entries/{entry_id}`. 
+ string dataset = 1 [ + (google.api.field_behavior) = OUTPUT_ONLY, + (google.api.resource_reference) = { + type: "datacatalog.googleapis.com/Entry" + } + ]; // Output only. The table name prefix of the shards. The name of any given - // shard is [table_prefix]YYYYMMDD, e.g. for shard MyTable20180101, the - // table_prefix is "MyTable" - string table_prefix = 2; + // shard is + // `[table_prefix]YYYYMMDD`, for example, for shard `MyTable20180101`, the + // `table_prefix` is `MyTable`. + string table_prefix = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Total number of shards. - int64 shard_count = 3; + int64 shard_count = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; } diff --git a/datacatalog/google/cloud/datacatalog_v1beta1/proto/table_spec_pb2.py b/datacatalog/google/cloud/datacatalog_v1beta1/proto/table_spec_pb2.py index c2fcf4ba9ea1..95d06ed5d360 100644 --- a/datacatalog/google/cloud/datacatalog_v1beta1/proto/table_spec_pb2.py +++ b/datacatalog/google/cloud/datacatalog_v1beta1/proto/table_spec_pb2.py @@ -16,6 +16,10 @@ _sym_db = _symbol_database.Default() +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 + + DESCRIPTOR = _descriptor.FileDescriptor( name="google/cloud/datacatalog_v1beta1/proto/table_spec.proto", package="google.cloud.datacatalog.v1beta1", @@ -24,8 +28,12 @@ "\n\034com.google.cloud.datacatalogP\001ZKgoogle.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1;datacatalog\370\001\001" ), serialized_pb=_b( - '\n7google/cloud/datacatalog_v1beta1/proto/table_spec.proto\x12 google.cloud.datacatalog.v1beta1"\xf2\x01\n\x11\x42igQueryTableSpec\x12L\n\x11table_source_type\x18\x01 \x01(\x0e\x32\x31.google.cloud.datacatalog.v1beta1.TableSourceType\x12?\n\tview_spec\x18\x02 \x01(\x0b\x32*.google.cloud.datacatalog.v1beta1.ViewSpecH\x00\x12\x41\n\ntable_spec\x18\x03 
\x01(\x0b\x32+.google.cloud.datacatalog.v1beta1.TableSpecH\x00\x42\x0b\n\ttype_spec"\x1e\n\x08ViewSpec\x12\x12\n\nview_query\x18\x01 \x01(\t""\n\tTableSpec\x12\x15\n\rgrouped_entry\x18\x01 \x01(\t"U\n\x17\x42igQueryDateShardedSpec\x12\x0f\n\x07\x64\x61taset\x18\x01 \x01(\t\x12\x14\n\x0ctable_prefix\x18\x02 \x01(\t\x12\x13\n\x0bshard_count\x18\x03 \x01(\x03*[\n\x0fTableSourceType\x12!\n\x1dTABLE_SOURCE_TYPE_UNSPECIFIED\x10\x00\x12\x11\n\rBIGQUERY_VIEW\x10\x02\x12\x12\n\x0e\x42IGQUERY_TABLE\x10\x05\x42p\n\x1c\x63om.google.cloud.datacatalogP\x01ZKgoogle.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1;datacatalog\xf8\x01\x01\x62\x06proto3' + '\n7google/cloud/datacatalog_v1beta1/proto/table_spec.proto\x12 google.cloud.datacatalog.v1beta1\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto"\xf7\x01\n\x11\x42igQueryTableSpec\x12Q\n\x11table_source_type\x18\x01 \x01(\x0e\x32\x31.google.cloud.datacatalog.v1beta1.TableSourceTypeB\x03\xe0\x41\x03\x12?\n\tview_spec\x18\x02 \x01(\x0b\x32*.google.cloud.datacatalog.v1beta1.ViewSpecH\x00\x12\x41\n\ntable_spec\x18\x03 \x01(\x0b\x32+.google.cloud.datacatalog.v1beta1.TableSpecH\x00\x42\x0b\n\ttype_spec"#\n\x08ViewSpec\x12\x17\n\nview_query\x18\x01 \x01(\tB\x03\xe0\x41\x03"L\n\tTableSpec\x12?\n\rgrouped_entry\x18\x01 \x01(\tB(\xe0\x41\x03\xfa\x41"\n datacatalog.googleapis.com/Entry"\x89\x01\n\x17\x42igQueryDateShardedSpec\x12\x39\n\x07\x64\x61taset\x18\x01 \x01(\tB(\xe0\x41\x03\xfa\x41"\n datacatalog.googleapis.com/Entry\x12\x19\n\x0ctable_prefix\x18\x02 \x01(\tB\x03\xe0\x41\x03\x12\x18\n\x0bshard_count\x18\x03 \x01(\x03\x42\x03\xe0\x41\x03*[\n\x0fTableSourceType\x12!\n\x1dTABLE_SOURCE_TYPE_UNSPECIFIED\x10\x00\x12\x11\n\rBIGQUERY_VIEW\x10\x02\x12\x12\n\x0e\x42IGQUERY_TABLE\x10\x05\x42p\n\x1c\x63om.google.cloud.datacatalogP\x01ZKgoogle.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1;datacatalog\xf8\x01\x01\x62\x06proto3' ), + dependencies=[ + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, + 
google_dot_api_dot_resource__pb2.DESCRIPTOR, + ], ) _TABLESOURCETYPE = _descriptor.EnumDescriptor( @@ -50,8 +58,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=493, - serialized_end=584, + serialized_start=658, + serialized_end=749, ) _sym_db.RegisterEnumDescriptor(_TABLESOURCETYPE) @@ -83,7 +91,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -139,8 +147,8 @@ fields=[], ) ], - serialized_start=94, - serialized_end=336, + serialized_start=154, + serialized_end=401, ) @@ -166,7 +174,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ) ], @@ -178,8 +186,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=338, - serialized_end=368, + serialized_start=403, + serialized_end=438, ) @@ -205,7 +213,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b('\340A\003\372A"\n datacatalog.googleapis.com/Entry'), file=DESCRIPTOR, ) ], @@ -217,8 +225,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=370, - serialized_end=404, + serialized_start=440, + serialized_end=516, ) @@ -244,7 +252,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b('\340A\003\372A"\n datacatalog.googleapis.com/Entry'), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -262,7 +270,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -280,7 +288,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -292,8 +300,8 @@ syntax="proto3", 
extension_ranges=[], oneofs=[], - serialized_start=406, - serialized_end=491, + serialized_start=519, + serialized_end=656, ) _BIGQUERYTABLESPEC.fields_by_name["table_source_type"].enum_type = _TABLESOURCETYPE @@ -334,10 +342,10 @@ Output only. view_spec: Table view specification. This field should only be populated - if table\_source\_type is BIGQUERY\_VIEW. + if ``table_source_type`` is ``BIGQUERY_VIEW``. table_spec: Spec of a BigQuery table. This field should only be populated - if table\_source\_type is BIGQUERY\_TABLE. + if ``table_source_type`` is ``BIGQUERY_TABLE``. """, # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.BigQueryTableSpec) ), @@ -373,12 +381,12 @@ Attributes: grouped_entry: - Output only. If the table is a dated shard, i.e. with name - pattern [prefix]YYYYMMDD, grouped\_entry is the Data Catalog - resource name of the date sharded grouped entry, e.g. projects - /{project\_id}/locations/{location}/entrygroups/{entry\_group\ - _id} /entries/{entry\_id}. Otherwise, grouped\_entry will be - empty. + Output only. If the table is a dated shard, i.e., with name + pattern ``[prefix]YYYYMMDD``, ``grouped_entry`` is the Data + Catalog resource name of the date sharded grouped entry, for + example, ``projects/{project_id}/locations/{location}/entrygro + ups/{entry_group_id}/entries/{entry_id}``. Otherwise, + ``grouped_entry`` is empty. """, # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.TableSpec) ), @@ -391,21 +399,22 @@ dict( DESCRIPTOR=_BIGQUERYDATESHARDEDSPEC, __module__="google.cloud.datacatalog_v1beta1.proto.table_spec_pb2", - __doc__="""Spec for a group of BigQuery tables with name pattern [prefix]YYYYMMDD. - Context: + __doc__="""Spec for a group of BigQuery tables with name pattern + ``[prefix]YYYYMMDD``. Context: https://cloud.google.com/bigquery/docs/partitioned-tables#partitioning\_versus\_sharding Attributes: dataset: Output only. 
The Data Catalog resource name of the dataset - entry the current table belongs to, e.g. projects/{project\_id - }/locations/{location}/entrygroups/{entry\_group\_id} - /entries/{entry\_id} + entry the current table belongs to, for example, ``projects/{p + roject_id}/locations/{location}/entrygroups/{entry_group_id}/e + ntries/{entry_id}``. table_prefix: Output only. The table name prefix of the shards. The name of - any given shard is [table\_prefix]YYYYMMDD, e.g. for shard - MyTable20180101, the table\_prefix is "MyTable" + any given shard is ``[table_prefix]YYYYMMDD``, for example, + for shard ``MyTable20180101``, the ``table_prefix`` is + ``MyTable``. shard_count: Output only. Total number of shards. """, @@ -416,4 +425,10 @@ DESCRIPTOR._options = None +_BIGQUERYTABLESPEC.fields_by_name["table_source_type"]._options = None +_VIEWSPEC.fields_by_name["view_query"]._options = None +_TABLESPEC.fields_by_name["grouped_entry"]._options = None +_BIGQUERYDATESHARDEDSPEC.fields_by_name["dataset"]._options = None +_BIGQUERYDATESHARDEDSPEC.fields_by_name["table_prefix"]._options = None +_BIGQUERYDATESHARDEDSPEC.fields_by_name["shard_count"]._options = None # @@protoc_insertion_point(module_scope) diff --git a/datacatalog/google/cloud/datacatalog_v1beta1/proto/tags.proto b/datacatalog/google/cloud/datacatalog_v1beta1/proto/tags.proto index f01843c206e9..c2fc2da43467 100644 --- a/datacatalog/google/cloud/datacatalog_v1beta1/proto/tags.proto +++ b/datacatalog/google/cloud/datacatalog_v1beta1/proto/tags.proto @@ -17,6 +17,8 @@ syntax = "proto3"; package google.cloud.datacatalog.v1beta1; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; import "google/protobuf/timestamp.proto"; option cc_enable_arenas = true; @@ -27,40 +29,49 @@ option java_package = "com.google.cloud.datacatalog"; // Tags are used to attach custom metadata to Data Catalog resources. Tags // conform to the specifications within their tag template. 
message Tag { - // Required when used in - // [UpdateTagRequest][google.cloud.datacatalog.v1beta1.UpdateTagRequest]. The - // resource name of the tag in URL format. For example, - // projects/{project_id}/locations/{location}/entrygroups/{entry_group_id}/entries/{entry_id}/tags/{tag_id}", - // where tag_id is a system-generated identifier. Note that this Tag may not - // actually be stored in the location in this name. + option (google.api.resource) = { + type: "datacatalog.googleapis.com/Tag" + pattern: "projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}/tags/{tag}" + }; + + // The resource name of the tag in URL format. Example: + // + // * projects/{project_id}/locations/{location}/entrygroups/{entry_group_id}/entries/{entry_id}/tags/{tag_id} + // + // where `tag_id` is a system-generated identifier. + // Note that this Tag may not actually be stored in the location in this name. string name = 1; - // Required. The resource name of the tag template that this tag uses. For - // example, - // projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}. + // Required. The resource name of the tag template that this tag uses. + // Example: + // + // * projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id} + // // This field cannot be modified after creation. - string template = 2; + string template = 2 [(google.api.field_behavior) = REQUIRED]; // Output only. The display name of the tag template. - string template_display_name = 5; + string template_display_name = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; - // Optional. The scope within the parent resource that this tag is attached - // to. If not provided, the tag is attached to the parent resource itself. + // The scope within the parent resource that this tag is attached to. If not + // provided, the tag is attached to the parent resource itself. // Deleting the scope from the parent resource will delete all tags attached // to that scope. 
These fields cannot be updated after creation. oneof scope { // Resources like Entry can have schemas associated with them. This scope // allows users to attach tags to an individual column based on that schema. // - // For attaching a tag to a nested column, use '.' to separate the column - // names: "outer_column.inner_column". + // For attaching a tag to a nested column, use `.` to separate the column + // names. Example: + // + // * `outer_column.inner_column` string column = 4; } - // Required. This maps the id of a tag field to the value of & additional + // Required. This maps the ID of a tag field to the value of and additional // information about that field. Valid field IDs are defined by the tag's // template. A tag must have at least 1 field and at most 500 fields. - map fields = 3; + map fields = 3 [(google.api.field_behavior) = REQUIRED]; } // Contains the value and supporting information for a field within @@ -73,7 +84,7 @@ message TagField { } // Output only. The display name of this field. - string display_name = 1; + string display_name = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; // Required. The value of this field. oneof kind { @@ -95,41 +106,59 @@ message TagField { } } -// Tag templates defines the schema of the tags used to attach to Data Catalog +// A tag template defines the schema of the tags used to attach to Data Catalog // resources. It defines the mapping of accepted field names and types that can // be used within the tag. The tag template also controls the access to the tag. message TagTemplate { - // Required when used in - // [UpdateTagTemplateRequest][google.cloud.datacatalog.v1beta1.UpdateTagTemplateRequest]. - // The resource name of the tag template in URL format. For example, - // projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}. 
+ option (google.api.resource) = { + type: "datacatalog.googleapis.com/TagTemplate" + pattern: "projects/{project}/locations/{location}/tagTemplates/{tag_template}" + }; + + // The resource name of the tag template in URL format. Example: + // + // * projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id} + // // Note that this TagTemplate and its child resources may not actually be // stored in the location in this name. string name = 1; - // Optional. The display name for this template. Default value is an empty - // string. + // The display name for this template. Defaults to an empty string. string display_name = 2; - // Required. Map of tag template field ids to the settings for the field. + // Required. Map of tag template field IDs to the settings for the field. // This map is an exhaustive list of the allowed fields. This map must contain // at least one field and at most 500 fields. // // The keys to this map are tag template field IDs. Field IDs can contain // letters (both uppercase and lowercase), numbers (0-9) and underscores (_). - // Field IDs must be at least 1 character long and at most 64 characters long. - // Field IDs must start with a letter or underscore. - map fields = 3; + // Field IDs must be at least 1 character long and at most + // 64 characters long. Field IDs must start with a letter or underscore. + map fields = 3 + [(google.api.field_behavior) = REQUIRED]; } // The template for an individual field within a tag template. message TagTemplateField { - // Optional. The display name for this field. Default value is an empty - // string. + option (google.api.resource) = { + type: "datacatalog.googleapis.com/TagTemplateField" + pattern: "projects/{project}/locations/{location}/tagTemplates/{tag_template}/fields/{field}" + }; + + // Output only. The resource name of the tag template field in URL format. 
+ // Example: + // + // * projects/{project_id}/locations/{location}/tagTemplates/{tag_template}/fields/{field} + // + // Note that this TagTemplateField may not actually be stored in the location + // in this name. + string name = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // The display name for this field. Defaults to an empty string. string display_name = 1; // Required. The type of value this tag field can contain. - FieldType type = 2; + FieldType type = 2 [(google.api.field_behavior) = REQUIRED]; } message FieldType { @@ -137,15 +166,15 @@ message FieldType { message EnumValue { // Required. The display name of the enum value. Must not be an empty // string. - string display_name = 1; + string display_name = 1 [(google.api.field_behavior) = REQUIRED]; } - // Required. The set of allowed values for this enum. This set must not be - // empty, the display names of the values in this set must not be empty and - // the display names of the values must be case-insensitively unique within - // this set. Currently, enum values can only be added to the list of allowed - // values. Deletion and renaming of enum values are not supported. Can have - // up to 500 allowed values. + // Required on create; optional on update. The set of allowed values for + // this enum. This set must not be empty, the display names of the values in + // this set must not be empty and the display names of the values must be + // case-insensitively unique within this set. Currently, enum values can + // only be added to the list of allowed values. Deletion and renaming of + // enum values are not supported. Can have up to 500 allowed values. 
repeated EnumValue allowed_values = 1; } diff --git a/datacatalog/google/cloud/datacatalog_v1beta1/proto/tags_pb2.py b/datacatalog/google/cloud/datacatalog_v1beta1/proto/tags_pb2.py index c9d36922e79e..379a68564735 100644 --- a/datacatalog/google/cloud/datacatalog_v1beta1/proto/tags_pb2.py +++ b/datacatalog/google/cloud/datacatalog_v1beta1/proto/tags_pb2.py @@ -15,6 +15,8 @@ _sym_db = _symbol_database.Default() +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 @@ -26,9 +28,13 @@ "\n\034com.google.cloud.datacatalogP\001ZKgoogle.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1;datacatalog\370\001\001" ), serialized_pb=_b( - '\n1google/cloud/datacatalog_v1beta1/proto/tags.proto\x12 google.cloud.datacatalog.v1beta1\x1a\x1fgoogle/protobuf/timestamp.proto"\xfd\x01\n\x03Tag\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x10\n\x08template\x18\x02 \x01(\t\x12\x1d\n\x15template_display_name\x18\x05 \x01(\t\x12\x10\n\x06\x63olumn\x18\x04 \x01(\tH\x00\x12\x41\n\x06\x66ields\x18\x03 \x03(\x0b\x32\x31.google.cloud.datacatalog.v1beta1.Tag.FieldsEntry\x1aY\n\x0b\x46ieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x39\n\x05value\x18\x02 \x01(\x0b\x32*.google.cloud.datacatalog.v1beta1.TagField:\x02\x38\x01\x42\x07\n\x05scope"\x94\x02\n\x08TagField\x12\x14\n\x0c\x64isplay_name\x18\x01 \x01(\t\x12\x16\n\x0c\x64ouble_value\x18\x02 \x01(\x01H\x00\x12\x16\n\x0cstring_value\x18\x03 \x01(\tH\x00\x12\x14\n\nbool_value\x18\x04 \x01(\x08H\x00\x12\x35\n\x0ftimestamp_value\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12J\n\nenum_value\x18\x06 \x01(\x0b\x32\x34.google.cloud.datacatalog.v1beta1.TagField.EnumValueH\x00\x1a!\n\tEnumValue\x12\x14\n\x0c\x64isplay_name\x18\x01 \x01(\tB\x06\n\x04kind"\xdf\x01\n\x0bTagTemplate\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 
\x01(\t\x12I\n\x06\x66ields\x18\x03 \x03(\x0b\x32\x39.google.cloud.datacatalog.v1beta1.TagTemplate.FieldsEntry\x1a\x61\n\x0b\x46ieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x41\n\x05value\x18\x02 \x01(\x0b\x32\x32.google.cloud.datacatalog.v1beta1.TagTemplateField:\x02\x38\x01"c\n\x10TagTemplateField\x12\x14\n\x0c\x64isplay_name\x18\x01 \x01(\t\x12\x39\n\x04type\x18\x02 \x01(\x0b\x32+.google.cloud.datacatalog.v1beta1.FieldType"\xa2\x03\n\tFieldType\x12S\n\x0eprimitive_type\x18\x01 \x01(\x0e\x32\x39.google.cloud.datacatalog.v1beta1.FieldType.PrimitiveTypeH\x00\x12I\n\tenum_type\x18\x02 \x01(\x0b\x32\x34.google.cloud.datacatalog.v1beta1.FieldType.EnumTypeH\x00\x1a\x85\x01\n\x08\x45numType\x12V\n\x0e\x61llowed_values\x18\x01 \x03(\x0b\x32>.google.cloud.datacatalog.v1beta1.FieldType.EnumType.EnumValue\x1a!\n\tEnumValue\x12\x14\n\x0c\x64isplay_name\x18\x01 \x01(\t"`\n\rPrimitiveType\x12\x1e\n\x1aPRIMITIVE_TYPE_UNSPECIFIED\x10\x00\x12\n\n\x06\x44OUBLE\x10\x01\x12\n\n\x06STRING\x10\x02\x12\x08\n\x04\x42OOL\x10\x03\x12\r\n\tTIMESTAMP\x10\x04\x42\x0b\n\ttype_declBp\n\x1c\x63om.google.cloud.datacatalogP\x01ZKgoogle.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1;datacatalog\xf8\x01\x01\x62\x06proto3' + '\n1google/cloud/datacatalog_v1beta1/proto/tags.proto\x12 google.cloud.datacatalog.v1beta1\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\x90\x03\n\x03Tag\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x15\n\x08template\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12"\n\x15template_display_name\x18\x05 \x01(\tB\x03\xe0\x41\x03\x12\x10\n\x06\x63olumn\x18\x04 \x01(\tH\x00\x12\x46\n\x06\x66ields\x18\x03 \x03(\x0b\x32\x31.google.cloud.datacatalog.v1beta1.Tag.FieldsEntryB\x03\xe0\x41\x02\x1aY\n\x0b\x46ieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x39\n\x05value\x18\x02 
\x01(\x0b\x32*.google.cloud.datacatalog.v1beta1.TagField:\x02\x38\x01:\x81\x01\xea\x41~\n\x1e\x64\x61tacatalog.googleapis.com/Tag\x12\\projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}/tags/{tag}B\x07\n\x05scope"\x99\x02\n\x08TagField\x12\x19\n\x0c\x64isplay_name\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12\x16\n\x0c\x64ouble_value\x18\x02 \x01(\x01H\x00\x12\x16\n\x0cstring_value\x18\x03 \x01(\tH\x00\x12\x14\n\nbool_value\x18\x04 \x01(\x08H\x00\x12\x35\n\x0ftimestamp_value\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12J\n\nenum_value\x18\x06 \x01(\x0b\x32\x34.google.cloud.datacatalog.v1beta1.TagField.EnumValueH\x00\x1a!\n\tEnumValue\x12\x14\n\x0c\x64isplay_name\x18\x01 \x01(\tB\x06\n\x04kind"\xd6\x02\n\x0bTagTemplate\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12N\n\x06\x66ields\x18\x03 \x03(\x0b\x32\x39.google.cloud.datacatalog.v1beta1.TagTemplate.FieldsEntryB\x03\xe0\x41\x02\x1a\x61\n\x0b\x46ieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x41\n\x05value\x18\x02 \x01(\x0b\x32\x32.google.cloud.datacatalog.v1beta1.TagTemplateField:\x02\x38\x01:p\xea\x41m\n&datacatalog.googleapis.com/TagTemplate\x12\x43projects/{project}/locations/{location}/tagTemplates/{tag_template}"\x83\x02\n\x10TagTemplateField\x12\x11\n\x04name\x18\x06 \x01(\tB\x03\xe0\x41\x03\x12\x14\n\x0c\x64isplay_name\x18\x01 \x01(\t\x12>\n\x04type\x18\x02 \x01(\x0b\x32+.google.cloud.datacatalog.v1beta1.FieldTypeB\x03\xe0\x41\x02:\x85\x01\xea\x41\x81\x01\n+datacatalog.googleapis.com/TagTemplateField\x12Rprojects/{project}/locations/{location}/tagTemplates/{tag_template}/fields/{field}"\xa7\x03\n\tFieldType\x12S\n\x0eprimitive_type\x18\x01 \x01(\x0e\x32\x39.google.cloud.datacatalog.v1beta1.FieldType.PrimitiveTypeH\x00\x12I\n\tenum_type\x18\x02 \x01(\x0b\x32\x34.google.cloud.datacatalog.v1beta1.FieldType.EnumTypeH\x00\x1a\x8a\x01\n\x08\x45numType\x12V\n\x0e\x61llowed_values\x18\x01 
\x03(\x0b\x32>.google.cloud.datacatalog.v1beta1.FieldType.EnumType.EnumValue\x1a&\n\tEnumValue\x12\x19\n\x0c\x64isplay_name\x18\x01 \x01(\tB\x03\xe0\x41\x02"`\n\rPrimitiveType\x12\x1e\n\x1aPRIMITIVE_TYPE_UNSPECIFIED\x10\x00\x12\n\n\x06\x44OUBLE\x10\x01\x12\n\n\x06STRING\x10\x02\x12\x08\n\x04\x42OOL\x10\x03\x12\r\n\tTIMESTAMP\x10\x04\x42\x0b\n\ttype_declBp\n\x1c\x63om.google.cloud.datacatalogP\x01ZKgoogle.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1;datacatalog\xf8\x01\x01\x62\x06proto3' ), - dependencies=[google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR], + dependencies=[ + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, + google_dot_api_dot_resource__pb2.DESCRIPTOR, + google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, + ], ) @@ -60,8 +66,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=1292, - serialized_end=1388, + serialized_start=1789, + serialized_end=1885, ) _sym_db.RegisterEnumDescriptor(_FIELDTYPE_PRIMITIVETYPE) @@ -118,8 +124,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=276, - serialized_end=365, + serialized_start=351, + serialized_end=440, ) _TAG = _descriptor.Descriptor( @@ -162,7 +168,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -180,7 +186,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -216,14 +222,16 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], extensions=[], nested_types=[_TAG_FIELDSENTRY], enum_types=[], - serialized_options=None, + serialized_options=_b( + "\352A~\n\036datacatalog.googleapis.com/Tag\022\\projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}/tags/{tag}" 
+ ), is_extendable=False, syntax="proto3", extension_ranges=[], @@ -236,8 +244,8 @@ fields=[], ) ], - serialized_start=121, - serialized_end=374, + serialized_start=181, + serialized_end=581, ) @@ -275,8 +283,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=612, - serialized_end=645, + serialized_start=824, + serialized_end=857, ) _TAGFIELD = _descriptor.Descriptor( @@ -301,7 +309,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -411,8 +419,8 @@ fields=[], ) ], - serialized_start=377, - serialized_end=653, + serialized_start=584, + serialized_end=865, ) @@ -468,8 +476,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=782, - serialized_end=879, + serialized_start=999, + serialized_end=1096, ) _TAGTEMPLATE = _descriptor.Descriptor( @@ -530,20 +538,22 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], extensions=[], nested_types=[_TAGTEMPLATE_FIELDSENTRY], enum_types=[], - serialized_options=None, + serialized_options=_b( + "\352Am\n&datacatalog.googleapis.com/TagTemplate\022Cprojects/{project}/locations/{location}/tagTemplates/{tag_template}" + ), is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=656, - serialized_end=879, + serialized_start=868, + serialized_end=1210, ) @@ -554,10 +564,28 @@ file=DESCRIPTOR, containing_type=None, fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.cloud.datacatalog.v1beta1.TagTemplateField.name", + index=0, + number=6, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\003"), + file=DESCRIPTOR, + ), 
_descriptor.FieldDescriptor( name="display_name", full_name="google.cloud.datacatalog.v1beta1.TagTemplateField.display_name", - index=0, + index=1, number=1, type=9, cpp_type=9, @@ -575,7 +603,7 @@ _descriptor.FieldDescriptor( name="type", full_name="google.cloud.datacatalog.v1beta1.TagTemplateField.type", - index=1, + index=2, number=2, type=11, cpp_type=10, @@ -587,20 +615,22 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - serialized_options=None, + serialized_options=_b( + "\352A\201\001\n+datacatalog.googleapis.com/TagTemplateField\022Rprojects/{project}/locations/{location}/tagTemplates/{tag_template}/fields/{field}" + ), is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=881, - serialized_end=980, + serialized_start=1213, + serialized_end=1472, ) @@ -626,7 +656,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ) ], @@ -638,8 +668,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=612, - serialized_end=645, + serialized_start=1749, + serialized_end=1787, ) _FIELDTYPE_ENUMTYPE = _descriptor.Descriptor( @@ -676,8 +706,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1157, - serialized_end=1290, + serialized_start=1649, + serialized_end=1787, ) _FIELDTYPE = _descriptor.Descriptor( @@ -740,8 +770,8 @@ fields=[], ) ], - serialized_start=983, - serialized_end=1401, + serialized_start=1475, + serialized_end=1898, ) _TAG_FIELDSENTRY.fields_by_name["value"].message_type = _TAGFIELD @@ -828,34 +858,32 @@ Attributes: name: - Required when used in [UpdateTagRequest][google.cloud.datacata - log.v1beta1.UpdateTagRequest]. The resource name of the tag in - URL format. 
For example, projects/{project\_id}/locations/{loc - ation}/entrygroups/{entry\_group\_id}/entries/{entry\_id}/tags - /{tag\_id}", where tag\_id is a system-generated identifier. - Note that this Tag may not actually be stored in the location - in this name. + The resource name of the tag in URL format. Example: - proje + cts/{project\_id}/locations/{location}/entrygroups/{entry\_gro + up\_id}/entries/{entry\_id}/tags/{tag\_id} where ``tag_id`` + is a system-generated identifier. Note that this Tag may not + actually be stored in the location in this name. template: Required. The resource name of the tag template that this tag - uses. For example, projects/{project\_id}/locations/{location} - /tagTemplates/{tag\_template\_id}. This field cannot be + uses. Example: - projects/{project\_id}/locations/{location} + /tagTemplates/{tag\_template\_id} This field cannot be modified after creation. template_display_name: Output only. The display name of the tag template. scope: - Optional. The scope within the parent resource that this tag - is attached to. If not provided, the tag is attached to the - parent resource itself. Deleting the scope from the parent - resource will delete all tags attached to that scope. These - fields cannot be updated after creation. + The scope within the parent resource that this tag is attached + to. If not provided, the tag is attached to the parent + resource itself. Deleting the scope from the parent resource + will delete all tags attached to that scope. These fields + cannot be updated after creation. column: Resources like Entry can have schemas associated with them. This scope allows users to attach tags to an individual column based on that schema. For attaching a tag to a nested column, - use '.' to separate the column names: - "outer\_column.inner\_column". + use ``.`` to separate the column names. Example: - + ``outer_column.inner_column`` fields: - Required. This maps the id of a tag field to the value of & + Required. 
This maps the ID of a tag field to the value of and additional information about that field. Valid field IDs are defined by the tag's template. A tag must have at least 1 field and at most 500 fields. @@ -931,7 +959,7 @@ ), DESCRIPTOR=_TAGTEMPLATE, __module__="google.cloud.datacatalog_v1beta1.proto.tags_pb2", - __doc__="""Tag templates defines the schema of the tags used to attach to Data + __doc__="""A tag template defines the schema of the tags used to attach to Data Catalog resources. It defines the mapping of accepted field names and types that can be used within the tag. The tag template also controls the access to the tag. @@ -939,17 +967,16 @@ Attributes: name: - Required when used in [UpdateTagTemplateRequest][google.cloud. - datacatalog.v1beta1.UpdateTagTemplateRequest]. The resource - name of the tag template in URL format. For example, projects/ - {project\_id}/locations/{location}/tagTemplates/{tag\_template - \_id}. Note that this TagTemplate and its child resources may - not actually be stored in the location in this name. + The resource name of the tag template in URL format. Example: + - projects/{project\_id}/locations/{location}/tagTemplates/{t + ag\_template\_id} Note that this TagTemplate and its child + resources may not actually be stored in the location in this + name. display_name: - Optional. The display name for this template. Default value is - an empty string. + The display name for this template. Defaults to an empty + string. fields: - Required. Map of tag template field ids to the settings for + Required. Map of tag template field IDs to the settings for the field. This map is an exhaustive list of the allowed fields. This map must contain at least one field and at most 500 fields. The keys to this map are tag template field IDs. @@ -974,9 +1001,14 @@ Attributes: + name: + Output only. The resource name of the tag template field in + URL format. 
Example: - projects/{project\_id}/locations/{loc + ation}/tagTemplates/{tag\_template}/fields/{field} Note that + this TagTemplateField may not actually be stored in the + location in this name. display_name: - Optional. The display name for this field. Default value is an - empty string. + The display name for this field. Defaults to an empty string. type: Required. The type of value this tag field can contain. """, @@ -1013,13 +1045,13 @@ __doc__=""" Attributes: allowed_values: - Required. The set of allowed values for this enum. This set - must not be empty, the display names of the values in this set - must not be empty and the display names of the values must be - case-insensitively unique within this set. Currently, enum - values can only be added to the list of allowed values. - Deletion and renaming of enum values are not supported. Can - have up to 500 allowed values. + Required on create; optional on update. The set of allowed + values for this enum. This set must not be empty, the display + names of the values in this set must not be empty and the + display names of the values must be case-insensitively unique + within this set. Currently, enum values can only be added to + the list of allowed values. Deletion and renaming of enum + values are not supported. Can have up to 500 allowed values. 
""", # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.FieldType.EnumType) ), @@ -1045,5 +1077,16 @@ DESCRIPTOR._options = None _TAG_FIELDSENTRY._options = None +_TAG.fields_by_name["template"]._options = None +_TAG.fields_by_name["template_display_name"]._options = None +_TAG.fields_by_name["fields"]._options = None +_TAG._options = None +_TAGFIELD.fields_by_name["display_name"]._options = None _TAGTEMPLATE_FIELDSENTRY._options = None +_TAGTEMPLATE.fields_by_name["fields"]._options = None +_TAGTEMPLATE._options = None +_TAGTEMPLATEFIELD.fields_by_name["name"]._options = None +_TAGTEMPLATEFIELD.fields_by_name["type"]._options = None +_TAGTEMPLATEFIELD._options = None +_FIELDTYPE_ENUMTYPE_ENUMVALUE.fields_by_name["display_name"]._options = None # @@protoc_insertion_point(module_scope) diff --git a/datacatalog/google/cloud/datacatalog_v1beta1/proto/timestamps.proto b/datacatalog/google/cloud/datacatalog_v1beta1/proto/timestamps.proto index bb048b915223..9a3d640e411c 100644 --- a/datacatalog/google/cloud/datacatalog_v1beta1/proto/timestamps.proto +++ b/datacatalog/google/cloud/datacatalog_v1beta1/proto/timestamps.proto @@ -17,6 +17,7 @@ syntax = "proto3"; package google.cloud.datacatalog.v1beta1; +import "google/api/field_behavior.proto"; import "google/protobuf/timestamp.proto"; option cc_enable_arenas = true; @@ -26,13 +27,14 @@ option java_package = "com.google.cloud.datacatalog"; // Timestamps about this resource according to a particular system. message SystemTimestamps { - // Output only. The creation time of the resource within the given system. + // The creation time of the resource within the given system. google.protobuf.Timestamp create_time = 1; - // Output only. The last-modified time of the resource within the given - // system. + // The last-modified time of the resource within the given system. google.protobuf.Timestamp update_time = 2; // Output only. The expiration time of the resource within the given system. 
- google.protobuf.Timestamp expire_time = 3; + // Currently only applicable to BigQuery resources. + google.protobuf.Timestamp expire_time = 3 + [(google.api.field_behavior) = OUTPUT_ONLY]; } diff --git a/datacatalog/google/cloud/datacatalog_v1beta1/proto/timestamps_pb2.py b/datacatalog/google/cloud/datacatalog_v1beta1/proto/timestamps_pb2.py index 3c1c341f65ce..4dff9fc23fa7 100644 --- a/datacatalog/google/cloud/datacatalog_v1beta1/proto/timestamps_pb2.py +++ b/datacatalog/google/cloud/datacatalog_v1beta1/proto/timestamps_pb2.py @@ -15,6 +15,7 @@ _sym_db = _symbol_database.Default() +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 @@ -26,9 +27,12 @@ "\n\034com.google.cloud.datacatalogP\001ZKgoogle.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1;datacatalog\370\001\001" ), serialized_pb=_b( - '\n7google/cloud/datacatalog_v1beta1/proto/timestamps.proto\x12 google.cloud.datacatalog.v1beta1\x1a\x1fgoogle/protobuf/timestamp.proto"\xa5\x01\n\x10SystemTimestamps\x12/\n\x0b\x63reate_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0b\x65xpire_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampBp\n\x1c\x63om.google.cloud.datacatalogP\x01ZKgoogle.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1;datacatalog\xf8\x01\x01\x62\x06proto3' + '\n7google/cloud/datacatalog_v1beta1/proto/timestamps.proto\x12 google.cloud.datacatalog.v1beta1\x1a\x1fgoogle/api/field_behavior.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xaa\x01\n\x10SystemTimestamps\x12/\n\x0b\x63reate_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x34\n\x0b\x65xpire_time\x18\x03 
\x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x42p\n\x1c\x63om.google.cloud.datacatalogP\x01ZKgoogle.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1;datacatalog\xf8\x01\x01\x62\x06proto3' ), - dependencies=[google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR], + dependencies=[ + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, + google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, + ], ) @@ -90,7 +94,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -102,8 +106,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=127, - serialized_end=292, + serialized_start=160, + serialized_end=330, ) _SYSTEMTIMESTAMPS.fields_by_name[ @@ -129,14 +133,13 @@ Attributes: create_time: - Output only. The creation time of the resource within the - given system. + The creation time of the resource within the given system. update_time: - Output only. The last-modified time of the resource within the - given system. + The last-modified time of the resource within the given + system. expire_time: Output only. The expiration time of the resource within the - given system. + given system. Currently only applicable to BigQuery resources. 
""", # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.SystemTimestamps) ), @@ -145,4 +148,5 @@ DESCRIPTOR._options = None +_SYSTEMTIMESTAMPS.fields_by_name["expire_time"]._options = None # @@protoc_insertion_point(module_scope) diff --git a/datacatalog/google/cloud/datacatalog_v1beta1/types.py b/datacatalog/google/cloud/datacatalog_v1beta1/types.py index 05d90716fc84..e72d22d4ec84 100644 --- a/datacatalog/google/cloud/datacatalog_v1beta1/types.py +++ b/datacatalog/google/cloud/datacatalog_v1beta1/types.py @@ -21,6 +21,7 @@ from google.api_core.protobuf_helpers import get_messages from google.cloud.datacatalog_v1beta1.proto import datacatalog_pb2 +from google.cloud.datacatalog_v1beta1.proto import gcs_fileset_spec_pb2 from google.cloud.datacatalog_v1beta1.proto import schema_pb2 from google.cloud.datacatalog_v1beta1.proto import search_pb2 from google.cloud.datacatalog_v1beta1.proto import table_spec_pb2 @@ -47,6 +48,7 @@ _local_modules = [ datacatalog_pb2, + gcs_fileset_spec_pb2, schema_pb2, search_pb2, table_spec_pb2, diff --git a/datacatalog/noxfile.py b/datacatalog/noxfile.py index 3d92df19084f..509a565876ed 100644 --- a/datacatalog/noxfile.py +++ b/datacatalog/noxfile.py @@ -125,6 +125,25 @@ def system(session): session.run("py.test", "--quiet", system_test_folder_path, *session.posargs) +@nox.session(python=["2.7", "3.7"]) +def samples(session): + requirements_path = os.path.join("samples", "requirements.txt") + requirements_exists = os.path.exists(requirements_path) + + # Sanity check: Only run tests if the environment variable is set. 
+ if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""): + session.skip("Credentials must be set via environment variable") + + session.install("mock", "pytest") + for local_dep in LOCAL_DEPS: + session.install("-e", local_dep) + if requirements_exists: + session.install("-r", requirements_path) + session.install("-e", ".") + + session.run("py.test", "--quiet", "samples", *session.posargs) + + @nox.session(python="3.7") def cover(session): """Run the final coverage report. diff --git a/datacatalog/samples/__init__.py b/datacatalog/samples/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/datacatalog/samples/tests/__init__.py b/datacatalog/samples/tests/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/datacatalog/samples/tests/conftest.py b/datacatalog/samples/tests/conftest.py new file mode 100644 index 000000000000..b0669fa0df28 --- /dev/null +++ b/datacatalog/samples/tests/conftest.py @@ -0,0 +1,81 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +import datetime +import uuid + +import pytest + +import google.auth +from google.cloud import datacatalog_v1beta1 + + +@pytest.fixture(scope="session") +def client(credentials): + return datacatalog_v1beta1.DataCatalogClient(credentials=credentials) + + +@pytest.fixture(scope="session") +def default_credentials(): + return google.auth.default() + + +@pytest.fixture(scope="session") +def credentials(default_credentials): + return default_credentials[0] + + +@pytest.fixture(scope="session") +def project_id(default_credentials): + return default_credentials[1] + + +@pytest.fixture +def random_entry_group_id(client, project_id): + now = datetime.datetime.now() + random_entry_group_id = "example_entry_group_{}_{}".format( + now.strftime("%Y%m%d%H%M%S"), uuid.uuid4().hex[:8] + ) + yield random_entry_group_id + entry_group_name = datacatalog_v1beta1.DataCatalogClient.entry_group_path( + project_id, "us-central1", random_entry_group_id + ) + client.delete_entry_group(entry_group_name) + + +@pytest.fixture +def random_entry_name(client, entry_group_name): + now = datetime.datetime.now() + random_entry_id = "example_entry_{}_{}".format( + now.strftime("%Y%m%d%H%M%S"), uuid.uuid4().hex[:8] + ) + random_entry_name = "{}/entries/{}".format(entry_group_name, random_entry_id) + yield random_entry_name + client.delete_entry(random_entry_name) + + +@pytest.fixture +def entry_group_name(client, project_id): + now = datetime.datetime.now() + entry_group_id = "python_entry_group_sample_{}_{}".format( + now.strftime("%Y%m%d%H%M%S"), uuid.uuid4().hex[:8] + ) + entry_group = client.create_entry_group( + datacatalog_v1beta1.DataCatalogClient.location_path(project_id, "us-central1"), + entry_group_id, + {}, + ) + yield entry_group.name + client.delete_entry_group(entry_group.name) diff --git a/datacatalog/samples/tests/test_create_entry_group.py b/datacatalog/samples/tests/test_create_entry_group.py new file mode 100644 index 000000000000..9c8c33b8cd64 --- /dev/null +++ 
b/datacatalog/samples/tests/test_create_entry_group.py @@ -0,0 +1,29 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from ..v1beta1 import create_entry_group + + +def test_create_entry_group(capsys, client, project_id, random_entry_group_id): + + create_entry_group.create_entry_group(client, project_id, random_entry_group_id) + out, err = capsys.readouterr() + assert ( + "Created entry group" + " projects/{}/locations/{}/entryGroups/{}".format( + project_id, "us-central1", random_entry_group_id + ) + in out + ) diff --git a/datacatalog/samples/tests/test_create_fileset_entry.py b/datacatalog/samples/tests/test_create_fileset_entry.py new file mode 100644 index 000000000000..8d0bc28fd07f --- /dev/null +++ b/datacatalog/samples/tests/test_create_fileset_entry.py @@ -0,0 +1,30 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +import re + +from ..v1beta1 import create_fileset_entry + + +def test_create_fileset_entry(capsys, client, random_entry_name): + + entry_name_pattern = "(?P.+?)/entries/(?P.+?$)" + entry_name_matches = re.match(entry_name_pattern, random_entry_name) + entry_group_name = entry_name_matches.group("entry_group_name") + entry_id = entry_name_matches.group("entry_id") + + create_fileset_entry.create_fileset_entry(client, entry_group_name, entry_id) + out, err = capsys.readouterr() + assert "Created entry {}".format(random_entry_name) in out diff --git a/datacatalog/samples/v1beta1/__init__.py b/datacatalog/samples/v1beta1/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/datacatalog/samples/v1beta1/create_entry_group.py b/datacatalog/samples/v1beta1/create_entry_group.py new file mode 100644 index 000000000000..24a856d8739c --- /dev/null +++ b/datacatalog/samples/v1beta1/create_entry_group.py @@ -0,0 +1,54 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def create_entry_group(client, project_id, entry_group_id): + + # [START datacatalog_create_entry_group_tag] + from google.cloud import datacatalog_v1beta1 + + # TODO(developer): Construct a Data Catalog client object. + # client = datacatalog_v1beta1.DataCatalogClient() + + # TODO(developer): Set entry_group_id to the ID of the + # entry group to create. 
+ # project_id = "your-project-id" + + # TODO(developer): Specify the geographic location where the + # entry group should reside. + # Currently, Data Catalog stores metadata in the us-central1 region. + location_id = "us-central1" + + # TODO(developer): Set entry_group_id to the ID of the + # entry group to create. + # entry_group_id = "your_entry_group_id" + + # Construct a full location path to be the parent of the entry group. + parent = datacatalog_v1beta1.DataCatalogClient.location_path( + project_id, location_id + ) + + # Construct a full EntryGroup object to send to the API. + entry_group = datacatalog_v1beta1.types.EntryGroup() + entry_group.display_name = "My Entry Group" + entry_group.description = "This Entry Group consists of ..." + + # Send the entry group to the API for creation. + # Raises google.api_core.exceptions.AlreadyExists if the Entry Group + # already exists within the project. + entry_group = client.create_entry_group( + parent, entry_group_id, entry_group + ) # Make an API request. + print("Created entry group {}".format(entry_group.name)) + # [END datacatalog_create_entry_group_tag] diff --git a/datacatalog/samples/v1beta1/create_fileset_entry.py b/datacatalog/samples/v1beta1/create_fileset_entry.py new file mode 100644 index 000000000000..6cc275655988 --- /dev/null +++ b/datacatalog/samples/v1beta1/create_fileset_entry.py @@ -0,0 +1,86 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +def create_fileset_entry(client, entry_group_name, entry_id): + + # [START datacatalog_create_fileset_tag] + from google.cloud import datacatalog_v1beta1 + + # TODO(developer): Construct a Data Catalog client object. + # client = datacatalog_v1beta1.DataCatalogClient() + + # TODO(developer): Set entry_group_name to the Name of the entry group + # the entry will belong. + # entry_group_name = "your_entry_group_name" + + # TODO(developer): Set entry_id to the ID of the entry to create. + # entry_id = "your_entry_id" + + # Construct a full Entry object to send to the API. + entry = datacatalog_v1beta1.types.Entry() + entry.display_name = "My Fileset" + entry.description = "This Fileset consists of ..." + entry.gcs_fileset_spec.file_patterns.append("gs://my_bucket/*") + entry.type = datacatalog_v1beta1.enums.EntryType.FILESET + + # Create the Schema, for example when you have a csv file. + columns = [] + columns.append( + datacatalog_v1beta1.types.ColumnSchema( + column="first_name", + description="First name", + mode="REQUIRED", + type="STRING", + ) + ) + + columns.append( + datacatalog_v1beta1.types.ColumnSchema( + column="last_name", description="Last name", mode="REQUIRED", type="STRING" + ) + ) + + # Create sub columns for the addresses parent column + subcolumns = [] + subcolumns.append( + datacatalog_v1beta1.types.ColumnSchema( + column="city", description="City", mode="NULLABLE", type="STRING" + ) + ) + + subcolumns.append( + datacatalog_v1beta1.types.ColumnSchema( + column="state", description="State", mode="NULLABLE", type="STRING" + ) + ) + + columns.append( + datacatalog_v1beta1.types.ColumnSchema( + column="addresses", + description="Addresses", + mode="REPEATED", + subcolumns=subcolumns, + type="RECORD", + ) + ) + + entry.schema.columns.extend(columns) + + # Send the entry to the API for creation. + # Raises google.api_core.exceptions.AlreadyExists if the Entry already + # exists within the project. 
+ entry = client.create_entry(entry_group_name, entry_id, entry) + print("Created entry {}".format(entry.name)) + # [END datacatalog_create_fileset_tag] diff --git a/datacatalog/setup.py b/datacatalog/setup.py index 29bbee31b9d8..624600269ca2 100644 --- a/datacatalog/setup.py +++ b/datacatalog/setup.py @@ -21,7 +21,7 @@ name = "google-cloud-datacatalog" description = "Google Cloud Data Catalog API API client library" -version = "0.3.0" +version = "0.4.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' diff --git a/datacatalog/synth.metadata b/datacatalog/synth.metadata index 39cb85fb4bd1..422d34666ecd 100644 --- a/datacatalog/synth.metadata +++ b/datacatalog/synth.metadata @@ -1,26 +1,26 @@ { - "updateTime": "2019-08-27T12:16:56.566500Z", + "updateTime": "2019-10-23T12:17:52.391296Z", "sources": [ { "generator": { "name": "artman", - "version": "0.35.1", - "dockerImage": "googleapis/artman@sha256:b11c7ea0d0831c54016fb50f4b796d24d1971439b30fbc32a369ba1ac887c384" + "version": "0.40.2", + "dockerImage": "googleapis/artman@sha256:3b8f7d9b4c206843ce08053474f5c64ae4d388ff7d995e68b59fb65edf73eeb9" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "650caad718bb063f189405c23972dc9818886358", - "internalRef": "265565344" + "sha": "0d0dc5172f16c9815a5eda6e99408fb96282f608", + "internalRef": "276178557" } }, { "template": { "name": "python_library", "origin": "synthtool.gcp", - "version": "2019.5.2" + "version": "2019.10.17" } } ], diff --git a/datacatalog/synth.py b/datacatalog/synth.py index 468dc63c713d..5f1436288e14 100644 --- a/datacatalog/synth.py +++ b/datacatalog/synth.py @@ -57,7 +57,11 @@ # ---------------------------------------------------------------------------- # Add templated files # ---------------------------------------------------------------------------- -templated_files = common.py_library(unit_cov_level=80, cov_level=80) +templated_files = common.py_library( 
+ unit_cov_level=80, + cov_level=80, + samples_test=True, +) s.move(templated_files) s.shell.run(["nox", "-s", "blacken"], hide_output=False) diff --git a/datacatalog/tests/unit/gapic/v1beta1/test_data_catalog_client_v1beta1.py b/datacatalog/tests/unit/gapic/v1beta1/test_data_catalog_client_v1beta1.py index 35dc0b31db1a..61c35d263299 100644 --- a/datacatalog/tests/unit/gapic/v1beta1/test_data_catalog_client_v1beta1.py +++ b/datacatalog/tests/unit/gapic/v1beta1/test_data_catalog_client_v1beta1.py @@ -112,6 +112,183 @@ def test_search_catalog_exception(self): with pytest.raises(CustomException): list(paged_list_response) + def test_create_entry_group(self): + # Setup Expected Response + name = "name3373707" + display_name = "displayName1615086568" + description = "description-1724546052" + expected_response = { + "name": name, + "display_name": display_name, + "description": description, + } + expected_response = datacatalog_pb2.EntryGroup(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = datacatalog_v1beta1.DataCatalogClient() + + # Setup Request + parent = client.location_path("[PROJECT]", "[LOCATION]") + entry_group_id = "entryGroupId-43122680" + entry_group = {} + + response = client.create_entry_group(parent, entry_group_id, entry_group) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = datacatalog_pb2.CreateEntryGroupRequest( + parent=parent, entry_group_id=entry_group_id, entry_group=entry_group + ) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_create_entry_group_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + 
create_channel.return_value = channel + client = datacatalog_v1beta1.DataCatalogClient() + + # Setup request + parent = client.location_path("[PROJECT]", "[LOCATION]") + entry_group_id = "entryGroupId-43122680" + entry_group = {} + + with pytest.raises(CustomException): + client.create_entry_group(parent, entry_group_id, entry_group) + + def test_get_entry_group(self): + # Setup Expected Response + name_2 = "name2-1052831874" + display_name = "displayName1615086568" + description = "description-1724546052" + expected_response = { + "name": name_2, + "display_name": display_name, + "description": description, + } + expected_response = datacatalog_pb2.EntryGroup(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = datacatalog_v1beta1.DataCatalogClient() + + # Setup Request + name = client.entry_group_path("[PROJECT]", "[LOCATION]", "[ENTRY_GROUP]") + + response = client.get_entry_group(name) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = datacatalog_pb2.GetEntryGroupRequest(name=name) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_get_entry_group_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = datacatalog_v1beta1.DataCatalogClient() + + # Setup request + name = client.entry_group_path("[PROJECT]", "[LOCATION]", "[ENTRY_GROUP]") + + with pytest.raises(CustomException): + client.get_entry_group(name) + + def test_delete_entry_group(self): + channel = ChannelStub() + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + 
create_channel.return_value = channel + client = datacatalog_v1beta1.DataCatalogClient() + + # Setup Request + name = client.entry_group_path("[PROJECT]", "[LOCATION]", "[ENTRY_GROUP]") + + client.delete_entry_group(name) + + assert len(channel.requests) == 1 + expected_request = datacatalog_pb2.DeleteEntryGroupRequest(name=name) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_delete_entry_group_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = datacatalog_v1beta1.DataCatalogClient() + + # Setup request + name = client.entry_group_path("[PROJECT]", "[LOCATION]", "[ENTRY_GROUP]") + + with pytest.raises(CustomException): + client.delete_entry_group(name) + + def test_create_entry(self): + # Setup Expected Response + name = "name3373707" + linked_resource = "linkedResource1544625012" + display_name = "displayName1615086568" + description = "description-1724546052" + expected_response = { + "name": name, + "linked_resource": linked_resource, + "display_name": display_name, + "description": description, + } + expected_response = datacatalog_pb2.Entry(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = datacatalog_v1beta1.DataCatalogClient() + + # Setup Request + parent = client.entry_group_path("[PROJECT]", "[LOCATION]", "[ENTRY_GROUP]") + entry_id = "entryId-2093663224" + entry = {} + + response = client.create_entry(parent, entry_id, entry) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = datacatalog_pb2.CreateEntryRequest( + parent=parent, entry_id=entry_id, entry=entry + ) 
+ actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_create_entry_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = datacatalog_v1beta1.DataCatalogClient() + + # Setup request + parent = client.entry_group_path("[PROJECT]", "[LOCATION]", "[ENTRY_GROUP]") + entry_id = "entryId-2093663224" + entry = {} + + with pytest.raises(CustomException): + client.create_entry(parent, entry_id, entry) + def test_update_entry(self): # Setup Expected Response name = "name3373707" @@ -158,6 +335,37 @@ def test_update_entry_exception(self): with pytest.raises(CustomException): client.update_entry(entry) + def test_delete_entry(self): + channel = ChannelStub() + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = datacatalog_v1beta1.DataCatalogClient() + + # Setup Request + name = client.entry_path("[PROJECT]", "[LOCATION]", "[ENTRY_GROUP]", "[ENTRY]") + + client.delete_entry(name) + + assert len(channel.requests) == 1 + expected_request = datacatalog_pb2.DeleteEntryRequest(name=name) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_delete_entry_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = datacatalog_v1beta1.DataCatalogClient() + + # Setup request + name = client.entry_path("[PROJECT]", "[LOCATION]", "[ENTRY_GROUP]", "[ENTRY]") + + with pytest.raises(CustomException): + client.delete_entry(name) + def test_get_entry(self): # Setup Expected Response name_2 = "name2-1052831874" @@ -406,8 +614,9 @@ def 
test_delete_tag_template_exception(self): def test_create_tag_template_field(self): # Setup Expected Response + name = "name3373707" display_name = "displayName1615086568" - expected_response = {"display_name": display_name} + expected_response = {"name": name, "display_name": display_name} expected_response = tags_pb2.TagTemplateField(**expected_response) # Mock the API response @@ -456,8 +665,9 @@ def test_create_tag_template_field_exception(self): def test_update_tag_template_field(self): # Setup Expected Response + name_2 = "name2-1052831874" display_name = "displayName1615086568" - expected_response = {"display_name": display_name} + expected_response = {"name": name_2, "display_name": display_name} expected_response = tags_pb2.TagTemplateField(**expected_response) # Mock the API response @@ -498,8 +708,9 @@ def test_update_tag_template_field_exception(self): def test_rename_tag_template_field(self): # Setup Expected Response + name_2 = "name2-1052831874" display_name = "displayName1615086568" - expected_response = {"display_name": display_name} + expected_response = {"name": name_2, "display_name": display_name} expected_response = tags_pb2.TagTemplateField(**expected_response) # Mock the API response diff --git a/datalabeling/CHANGELOG.md b/datalabeling/CHANGELOG.md index a97c3cab80fb..9616fc56ff76 100644 --- a/datalabeling/CHANGELOG.md +++ b/datalabeling/CHANGELOG.md @@ -4,6 +4,24 @@ [1]: https://pypi.org/project/google-cloud-datalabeling/#history +## 0.3.0 + +10-10-2019 11:08 PDT + + +### Implementation Changes +- Remove send / receive message size limit (via synth). ([#8950](https://github.com/googleapis/google-cloud-python/pull/8950)) + +### Dependencies +- Bump minimum version for google-api-core to 1.14.0. ([#8709](https://github.com/googleapis/google-cloud-python/pull/8709)) + +### Documentation +- Fix intersphinx reference to requests. 
([#9294](https://github.com/googleapis/google-cloud-python/pull/9294)) +- Remove CI for gh-pages, use googleapis.dev for `api_core` refs. ([#9085](https://github.com/googleapis/google-cloud-python/pull/9085)) +- Remove compatability badges from READMEs. ([#9035](https://github.com/googleapis/google-cloud-python/pull/9035)) +- Update intersphinx mapping for requests. ([#8805](https://github.com/googleapis/google-cloud-python/pull/8805)) +- Link to googleapis.dev documentation in READMEs. ([#8705](https://github.com/googleapis/google-cloud-python/pull/8705)) + ## 0.2.1 07-16-2019 10:17 PDT diff --git a/datalabeling/docs/conf.py b/datalabeling/docs/conf.py index f4fc20a29491..d3bf6996a2f6 100644 --- a/datalabeling/docs/conf.py +++ b/datalabeling/docs/conf.py @@ -338,7 +338,7 @@ "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://2.python-requests.org/en/master/", None), + "requests": ("https://requests.kennethreitz.org/en/stable/", None), "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } diff --git a/datalabeling/setup.py b/datalabeling/setup.py index d72653c651fe..0cc796762ebf 100644 --- a/datalabeling/setup.py +++ b/datalabeling/setup.py @@ -21,7 +21,7 @@ name = "google-cloud-datalabeling" description = "Data Labeling API client library" -version = "0.2.1" +version = "0.3.0" release_status = "Development Status :: 4 - Beta" dependencies = [ "google-api-core[grpc] >= 1.14.0, < 2.0.0dev", diff --git a/dataproc/docs/conf.py b/dataproc/docs/conf.py index d51adac42167..29a4ab9935bc 100644 --- a/dataproc/docs/conf.py +++ b/dataproc/docs/conf.py @@ -344,7 +344,7 @@ "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), 
"grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://2.python-requests.org/en/master/", None), + "requests": ("https://requests.kennethreitz.org/en/master/", None), "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } diff --git a/dataproc/google/cloud/dataproc_v1/gapic/cluster_controller_client.py b/dataproc/google/cloud/dataproc_v1/gapic/cluster_controller_client.py index 4cda9a051e29..82571d6ed3b5 100644 --- a/dataproc/google/cloud/dataproc_v1/gapic/cluster_controller_client.py +++ b/dataproc/google/cloud/dataproc_v1/gapic/cluster_controller_client.py @@ -203,7 +203,9 @@ def create_cluster( metadata=None, ): """ - Creates a cluster in a project. + Creates a cluster in a project. The returned ``Operation.metadata`` will + be + `ClusterOperationMetadata `__. Example: >>> from google.cloud import dataproc_v1 @@ -306,7 +308,9 @@ def update_cluster( metadata=None, ): """ - Updates a cluster in a project. + Updates a cluster in a project. The returned ``Operation.metadata`` will + be + `ClusterOperationMetadata `__. Example: >>> from google.cloud import dataproc_v1 @@ -401,6 +405,10 @@ def update_cluster( config.secondary_worker_config.num_instances Resize secondary worker group + + config.autoscaling_config.policy_uriUse, stop using, or + change autoscaling policies + @@ -489,7 +497,9 @@ def delete_cluster( metadata=None, ): """ - Deletes a cluster in a project. + Deletes a cluster in a project. The returned ``Operation.metadata`` will + be + `ClusterOperationMetadata `__. Example: >>> from google.cloud import dataproc_v1 @@ -779,8 +789,11 @@ def diagnose_cluster( metadata=None, ): """ - Gets cluster diagnostic information. After the operation completes, the - Operation.response field contains ``DiagnoseClusterOutputLocation``. + Gets cluster diagnostic information. The returned ``Operation.metadata`` + will be + `ClusterOperationMetadata `__. 
+ After the operation completes, ``Operation.response`` contains + `DiagnoseClusterResults `__. Example: >>> from google.cloud import dataproc_v1 diff --git a/dataproc/google/cloud/dataproc_v1/gapic/transports/cluster_controller_grpc_transport.py b/dataproc/google/cloud/dataproc_v1/gapic/transports/cluster_controller_grpc_transport.py index b3c8b7d50f64..3c4a813d437b 100644 --- a/dataproc/google/cloud/dataproc_v1/gapic/transports/cluster_controller_grpc_transport.py +++ b/dataproc/google/cloud/dataproc_v1/gapic/transports/cluster_controller_grpc_transport.py @@ -119,7 +119,9 @@ def channel(self): def create_cluster(self): """Return the gRPC stub for :meth:`ClusterControllerClient.create_cluster`. - Creates a cluster in a project. + Creates a cluster in a project. The returned ``Operation.metadata`` will + be + `ClusterOperationMetadata `__. Returns: Callable: A callable which accepts the appropriate @@ -132,7 +134,9 @@ def create_cluster(self): def update_cluster(self): """Return the gRPC stub for :meth:`ClusterControllerClient.update_cluster`. - Updates a cluster in a project. + Updates a cluster in a project. The returned ``Operation.metadata`` will + be + `ClusterOperationMetadata `__. Returns: Callable: A callable which accepts the appropriate @@ -145,7 +149,9 @@ def update_cluster(self): def delete_cluster(self): """Return the gRPC stub for :meth:`ClusterControllerClient.delete_cluster`. - Deletes a cluster in a project. + Deletes a cluster in a project. The returned ``Operation.metadata`` will + be + `ClusterOperationMetadata `__. Returns: Callable: A callable which accepts the appropriate @@ -184,8 +190,11 @@ def list_clusters(self): def diagnose_cluster(self): """Return the gRPC stub for :meth:`ClusterControllerClient.diagnose_cluster`. - Gets cluster diagnostic information. After the operation completes, the - Operation.response field contains ``DiagnoseClusterOutputLocation``. + Gets cluster diagnostic information. 
The returned ``Operation.metadata`` + will be + `ClusterOperationMetadata `__. + After the operation completes, ``Operation.response`` contains + `DiagnoseClusterResults `__. Returns: Callable: A callable which accepts the appropriate diff --git a/dataproc/google/cloud/dataproc_v1/gapic/transports/workflow_template_service_grpc_transport.py b/dataproc/google/cloud/dataproc_v1/gapic/transports/workflow_template_service_grpc_transport.py index 73955041c6fb..86a35d067ba4 100644 --- a/dataproc/google/cloud/dataproc_v1/gapic/transports/workflow_template_service_grpc_transport.py +++ b/dataproc/google/cloud/dataproc_v1/gapic/transports/workflow_template_service_grpc_transport.py @@ -160,7 +160,10 @@ def instantiate_workflow_template(self): cause any inflight jobs to be cancelled and workflow-owned clusters to be deleted. - The ``Operation.metadata`` will be ``WorkflowMetadata``. + The ``Operation.metadata`` will be + `WorkflowMetadata `__. + Also see `Using + WorkflowMetadata `__. On successful completion, ``Operation.response`` will be ``Empty``. @@ -189,7 +192,10 @@ def instantiate_inline_workflow_template(self): cause any inflight jobs to be cancelled and workflow-owned clusters to be deleted. - The ``Operation.metadata`` will be ``WorkflowMetadata``. + The ``Operation.metadata`` will be + `WorkflowMetadata `__. + Also see `Using + WorkflowMetadata `__. On successful completion, ``Operation.response`` will be ``Empty``. diff --git a/dataproc/google/cloud/dataproc_v1/gapic/workflow_template_service_client.py b/dataproc/google/cloud/dataproc_v1/gapic/workflow_template_service_client.py index b3b38728fc47..0b39b3d67f62 100644 --- a/dataproc/google/cloud/dataproc_v1/gapic/workflow_template_service_client.py +++ b/dataproc/google/cloud/dataproc_v1/gapic/workflow_template_service_client.py @@ -241,9 +241,16 @@ def create_workflow_template( >>> response = client.create_workflow_template(parent, template) Args: - parent (str): Required. 
The "resource name" of the region, as described in - https://cloud.google.com/apis/design/resource\_names of the form - ``projects/{project_id}/regions/{region}`` + parent (str): Required. The resource name of the region or location, as described in + https://cloud.google.com/apis/design/resource\_names. + + - For ``projects.regions.workflowTemplates,create``, the resource name + of the region has the following format: + ``projects/{project_id}/regions/{region}`` + + - For ``projects.locations.workflowTemplates.create``, the resource + name of the location has the following format: + ``projects/{project_id}/locations/{location}`` template (Union[dict, ~google.cloud.dataproc_v1.types.WorkflowTemplate]): Required. The Dataproc workflow template to create. If a dict is provided, it must be of the same form as the protobuf @@ -322,11 +329,18 @@ def get_workflow_template( >>> response = client.get_workflow_template(name) Args: - name (str): Required. The "resource name" of the workflow template, as described in - https://cloud.google.com/apis/design/resource\_names of the form - ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`` + name (str): Required. The resource name of the workflow template, as described in + https://cloud.google.com/apis/design/resource\_names. + + - For ``projects.regions.workflowTemplates.get``, the resource name of + the template has the following format: + ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`` + + - For ``projects.locations.workflowTemplates.get``, the resource name + of the template has the following format: + ``projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`` version (int): Optional. The version of workflow template to retrieve. Only previously - instatiated versions can be retrieved. + instantiated versions can be retrieved. If unspecified, retrieves the current version. 
retry (Optional[google.api_core.retry.Retry]): A retry object used @@ -400,7 +414,10 @@ def instantiate_workflow_template( cause any inflight jobs to be cancelled and workflow-owned clusters to be deleted. - The ``Operation.metadata`` will be ``WorkflowMetadata``. + The ``Operation.metadata`` will be + `WorkflowMetadata `__. + Also see `Using + WorkflowMetadata `__. On successful completion, ``Operation.response`` will be ``Empty``. @@ -423,9 +440,16 @@ def instantiate_workflow_template( >>> metadata = response.metadata() Args: - name (str): Required. The "resource name" of the workflow template, as described in - https://cloud.google.com/apis/design/resource\_names of the form - ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`` + name (str): Required. The resource name of the workflow template, as described in + https://cloud.google.com/apis/design/resource\_names. + + - For ``projects.regions.workflowTemplates.instantiate``, the resource + name of the template has the following format: + ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`` + + - For ``projects.locations.workflowTemplates.instantiate``, the + resource name of the template has the following format: + ``projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`` version (int): Optional. The version of workflow template to instantiate. If specified, the workflow will be instantiated only if the current version of the workflow template has the supplied version. @@ -525,7 +549,10 @@ def instantiate_inline_workflow_template( cause any inflight jobs to be cancelled and workflow-owned clusters to be deleted. - The ``Operation.metadata`` will be ``WorkflowMetadata``. + The ``Operation.metadata`` will be + `WorkflowMetadata `__. + Also see `Using + WorkflowMetadata `__. On successful completion, ``Operation.response`` will be ``Empty``. 
@@ -551,9 +578,16 @@ def instantiate_inline_workflow_template( >>> metadata = response.metadata() Args: - parent (str): Required. The "resource name" of the workflow template region, as - described in https://cloud.google.com/apis/design/resource\_names of the - form ``projects/{project_id}/regions/{region}`` + parent (str): Required. The resource name of the region or location, as described in + https://cloud.google.com/apis/design/resource\_names. + + - For ``projects.regions.workflowTemplates,instantiateinline``, the + resource name of the region has the following format: + ``projects/{project_id}/regions/{region}`` + + - For ``projects.locations.workflowTemplates.instantiateinline``, the + resource name of the location has the following format: + ``projects/{project_id}/locations/{location}`` template (Union[dict, ~google.cloud.dataproc_v1.types.WorkflowTemplate]): Required. The workflow template to instantiate. If a dict is provided, it must be of the same form as the protobuf @@ -738,9 +772,16 @@ def list_workflow_templates( ... pass Args: - parent (str): Required. The "resource name" of the region, as described in - https://cloud.google.com/apis/design/resource\_names of the form - ``projects/{project_id}/regions/{region}`` + parent (str): Required. The resource name of the region or location, as described in + https://cloud.google.com/apis/design/resource\_names. + + - For ``projects.regions.workflowTemplates,list``, the resource name of + the region has the following format: + ``projects/{project_id}/regions/{region}`` + + - For ``projects.locations.workflowTemplates.list``, the resource name + of the location has the following format: + ``projects/{project_id}/locations/{location}`` page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per- resource, this parameter does not affect the return value. 
If page @@ -831,9 +872,16 @@ def delete_workflow_template( >>> client.delete_workflow_template(name) Args: - name (str): Required. The "resource name" of the workflow template, as described in - https://cloud.google.com/apis/design/resource\_names of the form - ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`` + name (str): Required. The resource name of the workflow template, as described in + https://cloud.google.com/apis/design/resource\_names. + + - For ``projects.regions.workflowTemplates.delete``, the resource name + of the template has the following format: + ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`` + + - For ``projects.locations.workflowTemplates.instantiate``, the + resource name of the template has the following format: + ``projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`` version (int): Optional. The version of workflow template to delete. If specified, will only delete the template if the current server version matches specified version. diff --git a/dataproc/google/cloud/dataproc_v1/proto/autoscaling_policies.proto b/dataproc/google/cloud/dataproc_v1/proto/autoscaling_policies.proto new file mode 100644 index 000000000000..cb466ee851f1 --- /dev/null +++ b/dataproc/google/cloud/dataproc_v1/proto/autoscaling_policies.proto @@ -0,0 +1,340 @@ +// Copyright 2019 Google LLC. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + +syntax = "proto3"; + +package google.cloud.dataproc.v1; + +import "google/api/annotations.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; +import "google/protobuf/duration.proto"; +import "google/protobuf/empty.proto"; +import "google/api/client.proto"; + +option go_package = "google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataproc"; +option java_multiple_files = true; +option java_outer_classname = "AutoscalingPoliciesProto"; +option java_package = "com.google.cloud.dataproc.v1"; + +// The API interface for managing autoscaling policies in the +// Google Cloud Dataproc API. +service AutoscalingPolicyService { + option (google.api.default_host) = "dataproc.googleapis.com"; + option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; + + // Creates new autoscaling policy. + rpc CreateAutoscalingPolicy(CreateAutoscalingPolicyRequest) returns (AutoscalingPolicy) { + option (google.api.http) = { + post: "/v1/{parent=projects/*/locations/*}/autoscalingPolicies" + body: "policy" + additional_bindings { + post: "/v1/{parent=projects/*/regions/*}/autoscalingPolicies" + body: "policy" + } + }; + } + + // Updates (replaces) autoscaling policy. + // + // Disabled check for update_mask, because all updates will be full + // replacements. + rpc UpdateAutoscalingPolicy(UpdateAutoscalingPolicyRequest) returns (AutoscalingPolicy) { + option (google.api.http) = { + put: "/v1/{policy.name=projects/*/locations/*/autoscalingPolicies/*}" + body: "policy" + additional_bindings { + put: "/v1/{policy.name=projects/*/regions/*/autoscalingPolicies/*}" + body: "policy" + } + }; + } + + // Retrieves autoscaling policy. 
+ rpc GetAutoscalingPolicy(GetAutoscalingPolicyRequest) returns (AutoscalingPolicy) { + option (google.api.http) = { + get: "/v1/{name=projects/*/locations/*/autoscalingPolicies/*}" + additional_bindings { + get: "/v1/{name=projects/*/regions/*/autoscalingPolicies/*}" + } + }; + } + + // Lists autoscaling policies in the project. + rpc ListAutoscalingPolicies(ListAutoscalingPoliciesRequest) returns (ListAutoscalingPoliciesResponse) { + option (google.api.http) = { + get: "/v1/{parent=projects/*/locations/*}/autoscalingPolicies" + additional_bindings { + get: "/v1/{parent=projects/*/regions/*}/autoscalingPolicies" + } + }; + } + + // Deletes an autoscaling policy. It is an error to delete an autoscaling + // policy that is in use by one or more clusters. + rpc DeleteAutoscalingPolicy(DeleteAutoscalingPolicyRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + delete: "/v1/{name=projects/*/locations/*/autoscalingPolicies/*}" + additional_bindings { + delete: "/v1/{name=projects/*/regions/*/autoscalingPolicies/*}" + } + }; + } +} + +// Describes an autoscaling policy for Dataproc cluster autoscaler. +message AutoscalingPolicy { + option (google.api.resource) = { + type: "dataproc.googleapis.com/AutoscalingPolicy" + pattern: "projects/{project}/regions/{region}/autoscalingPolicies/{autoscaling_policy}" + }; + + // Required. The policy id. + // + // The id must contain only letters (a-z, A-Z), numbers (0-9), + // underscores (_), and hyphens (-). Cannot begin or end with underscore + // or hyphen. Must consist of between 3 and 50 characters. + // + string id = 1; + + // Output only. The "resource name" of the autoscaling policy, as described + // in https://cloud.google.com/apis/design/resource_names. 
+ // + // * For `projects.regions.autoscalingPolicies`, the resource name of the + // policy has the following format: + // `projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}` + // + // * For `projects.locations.autoscalingPolicies`, the resource name of the + // policy has the following format: + // `projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}` + string name = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Autoscaling algorithm for policy. + oneof algorithm { + BasicAutoscalingAlgorithm basic_algorithm = 3 [(google.api.field_behavior) = REQUIRED]; + } + + // Required. Describes how the autoscaler will operate for primary workers. + InstanceGroupAutoscalingPolicyConfig worker_config = 4 [(google.api.field_behavior) = REQUIRED]; + + // Optional. Describes how the autoscaler will operate for secondary workers. + InstanceGroupAutoscalingPolicyConfig secondary_worker_config = 5 [(google.api.field_behavior) = OPTIONAL]; +} + +// Basic algorithm for autoscaling. +message BasicAutoscalingAlgorithm { + // Required. YARN autoscaling configuration. + BasicYarnAutoscalingConfig yarn_config = 1 [(google.api.field_behavior) = REQUIRED]; + + // Optional. Duration between scaling events. A scaling period starts after + // the update operation from the previous event has completed. + // + // Bounds: [2m, 1d]. Default: 2m. + google.protobuf.Duration cooldown_period = 2 [(google.api.field_behavior) = OPTIONAL]; +} + +// Basic autoscaling configurations for YARN. +message BasicYarnAutoscalingConfig { + // Required. Timeout for YARN graceful decommissioning of Node Managers. + // Specifies the duration to wait for jobs to complete before forcefully + // removing workers (and potentially interrupting jobs). Only applicable to + // downscaling operations. + // + // Bounds: [0s, 1d]. + google.protobuf.Duration graceful_decommission_timeout = 5 [(google.api.field_behavior) = REQUIRED]; + + // Required. 
Fraction of average pending memory in the last cooldown period + // for which to add workers. A scale-up factor of 1.0 will result in scaling + // up so that there is no pending memory remaining after the update (more + // aggressive scaling). A scale-up factor closer to 0 will result in a smaller + // magnitude of scaling up (less aggressive scaling). + // + // Bounds: [0.0, 1.0]. + double scale_up_factor = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. Fraction of average pending memory in the last cooldown period + // for which to remove workers. A scale-down factor of 1 will result in + // scaling down so that there is no available memory remaining after the + // update (more aggressive scaling). A scale-down factor of 0 disables + // removing workers, which can be beneficial for autoscaling a single job. + // + // Bounds: [0.0, 1.0]. + double scale_down_factor = 2 [(google.api.field_behavior) = REQUIRED]; + + // Optional. Minimum scale-up threshold as a fraction of total cluster size + // before scaling occurs. For example, in a 20-worker cluster, a threshold of + // 0.1 means the autoscaler must recommend at least a 2-worker scale-up for + // the cluster to scale. A threshold of 0 means the autoscaler will scale up + // on any recommended change. + // + // Bounds: [0.0, 1.0]. Default: 0.0. + double scale_up_min_worker_fraction = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Minimum scale-down threshold as a fraction of total cluster size + // before scaling occurs. For example, in a 20-worker cluster, a threshold of + // 0.1 means the autoscaler must recommend at least a 2 worker scale-down for + // the cluster to scale. A threshold of 0 means the autoscaler will scale down + // on any recommended change. + // + // Bounds: [0.0, 1.0]. Default: 0.0. 
+ double scale_down_min_worker_fraction = 4 [(google.api.field_behavior) = OPTIONAL]; +} + +// Configuration for the size bounds of an instance group, including its +// proportional size to other groups. +message InstanceGroupAutoscalingPolicyConfig { + // Optional. Minimum number of instances for this group. + // + // Primary workers - Bounds: [2, max_instances]. Default: 2. + // Secondary workers - Bounds: [0, max_instances]. Default: 0. + int32 min_instances = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Required. Maximum number of instances for this group. Required for primary + // workers. Note that by default, clusters will not use secondary workers. + // Required for secondary workers if the minimum secondary instances is set. + // + // Primary workers - Bounds: [min_instances, ). + // Secondary workers - Bounds: [min_instances, ). Default: 0. + int32 max_instances = 2 [(google.api.field_behavior) = REQUIRED]; + + // Optional. Weight for the instance group, which is used to determine the + // fraction of total workers in the cluster from this instance group. + // For example, if primary workers have weight 2, and secondary workers have + // weight 1, the cluster will have approximately 2 primary workers for each + // secondary worker. + // + // The cluster may not reach the specified balance if constrained + // by min/max bounds or other autoscaling settings. For example, if + // `max_instances` for secondary workers is 0, then only primary workers will + // be added. The cluster can also be out of balance when created. + // + // If weight is not set on any instance group, the cluster will default to + // equal weight for all groups: the cluster will attempt to maintain an equal + // number of workers in each group within the configured size bounds for each + // group. If weight is set for one group only, the cluster will default to + // zero weight on the unset group. 
For example if weight is set only on + // primary workers, the cluster will use primary workers only and no + // secondary workers. + int32 weight = 3 [(google.api.field_behavior) = OPTIONAL]; +} + +// A request to create an autoscaling policy. +message CreateAutoscalingPolicyRequest { + // Required. The "resource name" of the region or location, as described + // in https://cloud.google.com/apis/design/resource_names. + // + // * For `projects.regions.autoscalingPolicies.create`, the resource name + // of the region has the following format: + // `projects/{project_id}/regions/{region}` + // + // * For `projects.locations.autoscalingPolicies.create`, the resource name + // of the location has the following format: + // `projects/{project_id}/locations/{location}` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "dataproc.googleapis.com/AutoscalingPolicy" + } + ]; + + // The autoscaling policy to create. + AutoscalingPolicy policy = 2; +} + +// A request to fetch an autoscaling policy. +message GetAutoscalingPolicyRequest { + // Required. The "resource name" of the autoscaling policy, as described + // in https://cloud.google.com/apis/design/resource_names. + // + // * For `projects.regions.autoscalingPolicies.get`, the resource name + // of the policy has the following format: + // `projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}` + // + // * For `projects.locations.autoscalingPolicies.get`, the resource name + // of the policy has the following format: + // `projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "dataproc.googleapis.com/AutoscalingPolicy" + } + ]; +} + +// A request to update an autoscaling policy. +message UpdateAutoscalingPolicyRequest { + // Required. The updated autoscaling policy. 
+ AutoscalingPolicy policy = 1 [(google.api.field_behavior) = REQUIRED]; +} + +// A request to delete an autoscaling policy. +// +// Autoscaling policies in use by one or more clusters will not be deleted. +message DeleteAutoscalingPolicyRequest { + // Required. The "resource name" of the autoscaling policy, as described + // in https://cloud.google.com/apis/design/resource_names. + // + // * For `projects.regions.autoscalingPolicies.delete`, the resource name + // of the policy has the following format: + // `projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}` + // + // * For `projects.locations.autoscalingPolicies.delete`, the resource name + // of the policy has the following format: + // `projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "dataproc.googleapis.com/AutoscalingPolicy" + } + ]; +} + +// A request to list autoscaling policies in a project. +message ListAutoscalingPoliciesRequest { + // Required. The "resource name" of the region or location, as described + // in https://cloud.google.com/apis/design/resource_names. + // + // * For `projects.regions.autoscalingPolicies.list`, the resource name + // of the region has the following format: + // `projects/{project_id}/regions/{region}` + // + // * For `projects.locations.autoscalingPolicies.list`, the resource name + // of the location has the following format: + // `projects/{project_id}/locations/{location}` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "dataproc.googleapis.com/AutoscalingPolicy" + } + ]; + + // Optional. The maximum number of results to return in each response. + // Must be less than or equal to 1000. Defaults to 100. + int32 page_size = 2 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. 
The page token, returned by a previous call, to request the + // next page of results. + string page_token = 3 [(google.api.field_behavior) = OPTIONAL]; +} + +// A response to a request to list autoscaling policies in a project. +message ListAutoscalingPoliciesResponse { + // Output only. Autoscaling policies list. + repeated AutoscalingPolicy policies = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. This token is included in the response if there are more + // results to fetch. + string next_page_token = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; +} diff --git a/dataproc/google/cloud/dataproc_v1/proto/autoscaling_policies_pb2.py b/dataproc/google/cloud/dataproc_v1/proto/autoscaling_policies_pb2.py new file mode 100644 index 000000000000..0dc596cd2eca --- /dev/null +++ b/dataproc/google/cloud/dataproc_v1/proto/autoscaling_policies_pb2.py @@ -0,0 +1,1207 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/cloud/dataproc_v1/proto/autoscaling_policies.proto + +import sys + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 +from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 +from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + 
name="google/cloud/dataproc_v1/proto/autoscaling_policies.proto", + package="google.cloud.dataproc.v1", + syntax="proto3", + serialized_options=_b( + "\n\034com.google.cloud.dataproc.v1B\030AutoscalingPoliciesProtoP\001Z@google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataproc" + ), + serialized_pb=_b( + '\n9google/cloud/dataproc_v1/proto/autoscaling_policies.proto\x12\x18google.cloud.dataproc.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x17google/api/client.proto"\xd4\x03\n\x11\x41utoscalingPolicy\x12\n\n\x02id\x18\x01 \x01(\t\x12\x11\n\x04name\x18\x02 \x01(\tB\x03\xe0\x41\x03\x12S\n\x0f\x62\x61sic_algorithm\x18\x03 \x01(\x0b\x32\x33.google.cloud.dataproc.v1.BasicAutoscalingAlgorithmB\x03\xe0\x41\x02H\x00\x12Z\n\rworker_config\x18\x04 \x01(\x0b\x32>.google.cloud.dataproc.v1.InstanceGroupAutoscalingPolicyConfigB\x03\xe0\x41\x02\x12\x64\n\x17secondary_worker_config\x18\x05 \x01(\x0b\x32>.google.cloud.dataproc.v1.InstanceGroupAutoscalingPolicyConfigB\x03\xe0\x41\x01:|\xea\x41y\n)dataproc.googleapis.com/AutoscalingPolicy\x12Lprojects/{project}/regions/{region}/autoscalingPolicies/{autoscaling_policy}B\x0b\n\talgorithm"\xa4\x01\n\x19\x42\x61sicAutoscalingAlgorithm\x12N\n\x0byarn_config\x18\x01 \x01(\x0b\x32\x34.google.cloud.dataproc.v1.BasicYarnAutoscalingConfigB\x03\xe0\x41\x02\x12\x37\n\x0f\x63ooldown_period\x18\x02 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x01"\xf9\x01\n\x1a\x42\x61sicYarnAutoscalingConfig\x12\x45\n\x1dgraceful_decommission_timeout\x18\x05 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x02\x12\x1c\n\x0fscale_up_factor\x18\x01 \x01(\x01\x42\x03\xe0\x41\x02\x12\x1e\n\x11scale_down_factor\x18\x02 \x01(\x01\x42\x03\xe0\x41\x02\x12)\n\x1cscale_up_min_worker_fraction\x18\x03 \x01(\x01\x42\x03\xe0\x41\x01\x12+\n\x1escale_down_min_worker_fraction\x18\x04 
\x01(\x01\x42\x03\xe0\x41\x01"s\n$InstanceGroupAutoscalingPolicyConfig\x12\x1a\n\rmin_instances\x18\x01 \x01(\x05\x42\x03\xe0\x41\x01\x12\x1a\n\rmax_instances\x18\x02 \x01(\x05\x42\x03\xe0\x41\x02\x12\x13\n\x06weight\x18\x03 \x01(\x05\x42\x03\xe0\x41\x01"\xa0\x01\n\x1e\x43reateAutoscalingPolicyRequest\x12\x41\n\x06parent\x18\x01 \x01(\tB1\xe0\x41\x02\xfa\x41+\x12)dataproc.googleapis.com/AutoscalingPolicy\x12;\n\x06policy\x18\x02 \x01(\x0b\x32+.google.cloud.dataproc.v1.AutoscalingPolicy"^\n\x1bGetAutoscalingPolicyRequest\x12?\n\x04name\x18\x01 \x01(\tB1\xe0\x41\x02\xfa\x41+\n)dataproc.googleapis.com/AutoscalingPolicy"b\n\x1eUpdateAutoscalingPolicyRequest\x12@\n\x06policy\x18\x01 \x01(\x0b\x32+.google.cloud.dataproc.v1.AutoscalingPolicyB\x03\xe0\x41\x02"a\n\x1e\x44\x65leteAutoscalingPolicyRequest\x12?\n\x04name\x18\x01 \x01(\tB1\xe0\x41\x02\xfa\x41+\n)dataproc.googleapis.com/AutoscalingPolicy"\x94\x01\n\x1eListAutoscalingPoliciesRequest\x12\x41\n\x06parent\x18\x01 \x01(\tB1\xe0\x41\x02\xfa\x41+\x12)dataproc.googleapis.com/AutoscalingPolicy\x12\x16\n\tpage_size\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01\x12\x17\n\npage_token\x18\x03 \x01(\tB\x03\xe0\x41\x01"\x83\x01\n\x1fListAutoscalingPoliciesResponse\x12\x42\n\x08policies\x18\x01 \x03(\x0b\x32+.google.cloud.dataproc.v1.AutoscalingPolicyB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 
\x01(\tB\x03\xe0\x41\x03\x32\xfd\n\n\x18\x41utoscalingPolicyService\x12\x8c\x02\n\x17\x43reateAutoscalingPolicy\x12\x38.google.cloud.dataproc.v1.CreateAutoscalingPolicyRequest\x1a+.google.cloud.dataproc.v1.AutoscalingPolicy"\x89\x01\x82\xd3\xe4\x93\x02\x82\x01"7/v1/{parent=projects/*/locations/*}/autoscalingPolicies:\x06policyZ?"5/v1/{parent=projects/*/regions/*}/autoscalingPolicies:\x06policy\x12\x9a\x02\n\x17UpdateAutoscalingPolicy\x12\x38.google.cloud.dataproc.v1.UpdateAutoscalingPolicyRequest\x1a+.google.cloud.dataproc.v1.AutoscalingPolicy"\x97\x01\x82\xd3\xe4\x93\x02\x90\x01\x1a>/v1/{policy.name=projects/*/locations/*/autoscalingPolicies/*}:\x06policyZF\x1a/v1/{policy.name=projects/*/locations/*/autoscalingPolicies/*}:\006policyZF\032 labels = 8; + map labels = 8 [(google.api.field_behavior) = OPTIONAL]; // Output only. Cluster status. - ClusterStatus status = 4; + ClusterStatus status = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The previous cluster status. - repeated ClusterStatus status_history = 7; + repeated ClusterStatus status_history = 7 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. A cluster UUID (Unique Universal Identifier). Cloud Dataproc // generates this value when it creates the cluster. - string cluster_uuid = 6; + string cluster_uuid = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; // Contains cluster daemon metrics such as HDFS and YARN stats. // @@ -132,26 +169,26 @@ message ClusterConfig { // and manage this project-level, per-location bucket (see // [Cloud Dataproc staging // bucket](/dataproc/docs/concepts/configuring-clusters/staging-bucket)). - string config_bucket = 1; + string config_bucket = 1 [(google.api.field_behavior) = OPTIONAL]; // Optional. The shared Compute Engine config settings for // all instances in a cluster. - GceClusterConfig gce_cluster_config = 8; + GceClusterConfig gce_cluster_config = 8 [(google.api.field_behavior) = OPTIONAL]; // Optional. 
The Compute Engine config settings for // the master instance in a cluster. - InstanceGroupConfig master_config = 9; + InstanceGroupConfig master_config = 9 [(google.api.field_behavior) = OPTIONAL]; // Optional. The Compute Engine config settings for // worker instances in a cluster. - InstanceGroupConfig worker_config = 10; + InstanceGroupConfig worker_config = 10 [(google.api.field_behavior) = OPTIONAL]; // Optional. The Compute Engine config settings for // additional worker instances in a cluster. - InstanceGroupConfig secondary_worker_config = 12; + InstanceGroupConfig secondary_worker_config = 12 [(google.api.field_behavior) = OPTIONAL]; // Optional. The config settings for software inside the cluster. - SoftwareConfig software_config = 13; + SoftwareConfig software_config = 13 [(google.api.field_behavior) = OPTIONAL]; // Optional. Commands to execute on each node after config is // completed. By default, executables are run on master and all worker nodes. @@ -166,17 +203,38 @@ message ClusterConfig { // else // ... worker specific actions ... // fi - repeated NodeInitializationAction initialization_actions = 11; + repeated NodeInitializationAction initialization_actions = 11 [(google.api.field_behavior) = OPTIONAL]; // Optional. Encryption settings for the cluster. - EncryptionConfig encryption_config = 15; + EncryptionConfig encryption_config = 15 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Autoscaling config for the policy associated with the cluster. + // Cluster does not autoscale if this field is unset. + AutoscalingConfig autoscaling_config = 18 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Security settings for the cluster. + SecurityConfig security_config = 16 [(google.api.field_behavior) = OPTIONAL]; +} + +// Autoscaling Policy config associated with the cluster. +message AutoscalingConfig { + // Optional. The autoscaling policy used by the cluster. 
+ // + // Only resource names including projectid and location (region) are valid. + // Examples: + // + // * `https://www.googleapis.com/compute/v1/projects/[project_id]/locations/[dataproc_region]/autoscalingPolicies/[policy_id]` + // * `projects/[project_id]/locations/[dataproc_region]/autoscalingPolicies/[policy_id]` + // + // Note that the policy must be in the same project and Cloud Dataproc region. + string policy_uri = 1 [(google.api.field_behavior) = OPTIONAL]; } // Encryption settings for the cluster. message EncryptionConfig { // Optional. The Cloud KMS key name to use for PD disk encryption for all // instances in the cluster. - string gce_pd_kms_key_name = 1; + string gce_pd_kms_key_name = 1 [(google.api.field_behavior) = OPTIONAL]; } // Common config settings for resources of Compute Engine cluster @@ -193,7 +251,7 @@ message GceClusterConfig { // * `https://www.googleapis.com/compute/v1/projects/[project_id]/zones/[zone]` // * `projects/[project_id]/zones/[zone]` // * `us-central1-f` - string zone_uri = 1; + string zone_uri = 1 [(google.api.field_behavior) = OPTIONAL]; // Optional. The Compute Engine network to be used for machine // communications. Cannot be specified with subnetwork_uri. If neither @@ -206,7 +264,7 @@ message GceClusterConfig { // * `https://www.googleapis.com/compute/v1/projects/[project_id]/regions/global/default` // * `projects/[project_id]/regions/global/default` // * `default` - string network_uri = 2; + string network_uri = 2 [(google.api.field_behavior) = OPTIONAL]; // Optional. The Compute Engine subnetwork to be used for machine // communications. Cannot be specified with network_uri. @@ -216,7 +274,7 @@ message GceClusterConfig { // * `https://www.googleapis.com/compute/v1/projects/[project_id]/regions/us-east1/subnetworks/sub0` // * `projects/[project_id]/regions/us-east1/subnetworks/sub0` // * `sub0` - string subnetwork_uri = 6; + string subnetwork_uri = 6 [(google.api.field_behavior) = OPTIONAL]; // Optional. 
If true, all instances in the cluster will only have internal IP // addresses. By default, clusters are not restricted to internal IP @@ -224,7 +282,7 @@ message GceClusterConfig { // instance. This `internal_ip_only` restriction can only be enabled for // subnetwork enabled networks, and all off-cluster dependencies must be // configured to be accessible without external IP addresses. - bool internal_ip_only = 7; + bool internal_ip_only = 7 [(google.api.field_behavior) = OPTIONAL]; // Optional. The service account of the instances. Defaults to the default // Compute Engine service account. Custom service accounts need @@ -237,7 +295,7 @@ message GceClusterConfig { // https://cloud.google.com/compute/docs/access/service-accounts#custom_service_accounts // for more information). // Example: `[account_id]@[project_id].iam.gserviceaccount.com` - string service_account = 8; + string service_account = 8 [(google.api.field_behavior) = OPTIONAL]; // Optional. The URIs of service account scopes to be included in // Compute Engine instances. The following base set of scopes is always @@ -253,7 +311,7 @@ message GceClusterConfig { // * https://www.googleapis.com/auth/bigtable.admin.table // * https://www.googleapis.com/auth/bigtable.data // * https://www.googleapis.com/auth/devstorage.full_control - repeated string service_account_scopes = 3; + repeated string service_account_scopes = 3 [(google.api.field_behavior) = OPTIONAL]; // The Compute Engine tags to add to all instances (see // [Tagging instances](/compute/docs/label-or-tag-resources#tags)). @@ -270,16 +328,16 @@ message GceClusterConfig { message InstanceGroupConfig { // Optional. The number of VM instances in the instance group. // For master instance groups, must be set to 1. - int32 num_instances = 1; + int32 num_instances = 1 [(google.api.field_behavior) = OPTIONAL]; // Output only. The list of instance names. Cloud Dataproc derives the names // from `cluster_name`, `num_instances`, and the instance group. 
- repeated string instance_names = 2; + repeated string instance_names = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; // Optional. The Compute Engine image resource used for cluster // instances. It can be specified or may be inferred from // `SoftwareConfig.image_version`. - string image_uri = 3; + string image_uri = 3 [(google.api.field_behavior) = OPTIONAL]; // Optional. The Compute Engine machine type used for cluster instances. // @@ -294,36 +352,38 @@ message InstanceGroupConfig { // Placement](/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) // feature, you must use the short name of the machine type // resource, for example, `n1-standard-2`. - string machine_type_uri = 4; + string machine_type_uri = 4 [(google.api.field_behavior) = OPTIONAL]; // Optional. Disk option config settings. - DiskConfig disk_config = 5; + DiskConfig disk_config = 5 [(google.api.field_behavior) = OPTIONAL]; // Optional. Specifies that this instance group contains preemptible // instances. - bool is_preemptible = 6; + bool is_preemptible = 6 [(google.api.field_behavior) = OPTIONAL]; // Output only. The config for Compute Engine Instance Group // Manager that manages this group. // This is only used for preemptible instance groups. - ManagedGroupConfig managed_group_config = 7; + ManagedGroupConfig managed_group_config = 7 [(google.api.field_behavior) = OUTPUT_ONLY]; // Optional. The Compute Engine accelerator configuration for these // instances. - // - // **Beta Feature**: This feature is still under development. It may be - // changed before final release. - repeated AcceleratorConfig accelerators = 8; + repeated AcceleratorConfig accelerators = 8 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Specifies the minimum cpu platform for the Instance Group. + // See [Cloud Dataproc→Minimum CPU Platform] + // (/dataproc/docs/concepts/compute/dataproc-min-cpu). 
+ string min_cpu_platform = 9 [(google.api.field_behavior) = OPTIONAL]; } // Specifies the resources used to actively manage an instance group. message ManagedGroupConfig { // Output only. The name of the Instance Template used for the Managed // Instance Group. - string instance_template_name = 1; + string instance_template_name = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The name of the Instance Group Manager for this group. - string instance_group_manager_name = 2; + string instance_group_manager_name = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; } // Specifies the type and number of accelerator cards attached to the instances @@ -356,10 +416,10 @@ message DiskConfig { // Optional. Type of the boot disk (default is "pd-standard"). // Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or // "pd-standard" (Persistent Disk Hard Disk Drive). - string boot_disk_type = 3; + string boot_disk_type = 3 [(google.api.field_behavior) = OPTIONAL]; // Optional. Size in GB of the boot disk (default is 500GB). - int32 boot_disk_size_gb = 1; + int32 boot_disk_size_gb = 1 [(google.api.field_behavior) = OPTIONAL]; // Optional. Number of attached SSDs, from 0 to 4 (default is 0). // If SSDs are not attached, the boot disk is used to store runtime logs and @@ -367,20 +427,20 @@ message DiskConfig { // If one or more SSDs are attached, this runtime bulk // data is spread across them, and the boot disk contains only basic // config and installed binaries. - int32 num_local_ssds = 2; + int32 num_local_ssds = 2 [(google.api.field_behavior) = OPTIONAL]; } // Specifies an executable to run on a fully configured node and a // timeout period for executable completion. message NodeInitializationAction { // Required. Cloud Storage URI of executable file. - string executable_file = 1; + string executable_file = 1 [(google.api.field_behavior) = REQUIRED]; // Optional. Amount of time executable has to complete. Default is // 10 minutes. 
Cluster creation fails with an explanatory error message (the // name of the executable that caused the error and the exceeded timeout // period) if the executable is not completed at end of the timeout period. - google.protobuf.Duration execution_timeout = 2; + google.protobuf.Duration execution_timeout = 2 [(google.api.field_behavior) = OPTIONAL]; } // The status of a cluster and its instances. @@ -426,17 +486,95 @@ message ClusterStatus { } // Output only. The cluster's state. - State state = 1; + State state = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; - // Output only. Optional details of cluster's state. - string detail = 2; + // Optional. Output only. Details of cluster's state. + string detail = 2 [ + (google.api.field_behavior) = OUTPUT_ONLY, + (google.api.field_behavior) = OPTIONAL + ]; // Output only. Time when this state was entered. - google.protobuf.Timestamp state_start_time = 3; + google.protobuf.Timestamp state_start_time = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Additional state information that includes // status reported by the agent. - Substate substate = 4; + Substate substate = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; +} + +// Security related configuration, including Kerberos. +message SecurityConfig { + // Kerberos related configuration. + KerberosConfig kerberos_config = 1; +} + +// Specifies Kerberos related configuration. +message KerberosConfig { + // Optional. Flag to indicate whether to Kerberize the cluster. + bool enable_kerberos = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Required. The Cloud Storage URI of a KMS encrypted file containing the root + // principal password. + string root_principal_password_uri = 2 [(google.api.field_behavior) = REQUIRED]; + + // Required. The uri of the KMS key used to encrypt various sensitive + // files. + string kms_key_uri = 3 [(google.api.field_behavior) = REQUIRED]; + + // Optional. The Cloud Storage URI of the keystore file used for SSL + // encryption. 
If not provided, Dataproc will provide a self-signed + // certificate. + string keystore_uri = 4 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The Cloud Storage URI of the truststore file used for SSL + // encryption. If not provided, Dataproc will provide a self-signed + // certificate. + string truststore_uri = 5 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The Cloud Storage URI of a KMS encrypted file containing the + // password to the user provided keystore. For the self-signed certificate, + // this password is generated by Dataproc. + string keystore_password_uri = 6 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The Cloud Storage URI of a KMS encrypted file containing the + // password to the user provided key. For the self-signed certificate, this + // password is generated by Dataproc. + string key_password_uri = 7 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The Cloud Storage URI of a KMS encrypted file containing the + // password to the user provided truststore. For the self-signed certificate, + // this password is generated by Dataproc. + string truststore_password_uri = 8 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The remote realm the Dataproc on-cluster KDC will trust, should + // the user enable cross realm trust. + string cross_realm_trust_realm = 9 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The KDC (IP or hostname) for the remote trusted realm in a cross + // realm trust relationship. + string cross_realm_trust_kdc = 10 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The admin server (IP or hostname) for the remote trusted realm in + // a cross realm trust relationship. + string cross_realm_trust_admin_server = 11 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. 
The Cloud Storage URI of a KMS encrypted file containing the + // shared password between the on-cluster Kerberos realm and the remote + // trusted realm, in a cross realm trust relationship. + string cross_realm_trust_shared_password_uri = 12 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The Cloud Storage URI of a KMS encrypted file containing the + // master key of the KDC database. + string kdc_db_key_uri = 13 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The lifetime of the ticket granting ticket, in hours. + // If not specified, or user specifies 0, then default value 10 + // will be used. + int32 tgt_lifetime_hours = 14 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The name of the on-cluster Kerberos realm. + // If not specified, the uppercased domain of hostnames will be the realm. + string realm = 15 [(google.api.field_behavior) = OPTIONAL]; } // Specifies the selection and config of software inside the cluster. @@ -448,7 +586,7 @@ message SoftwareConfig { // ["preview" // version](/dataproc/docs/concepts/versioning/dataproc-versions#other_versions). // If unspecified, it defaults to the latest Debian version. - string image_version = 1; + string image_version = 1 [(google.api.field_behavior) = OPTIONAL]; // Optional. The properties to set on daemon config files. // @@ -468,10 +606,10 @@ message SoftwareConfig { // // For more information, see // [Cluster properties](/dataproc/docs/concepts/cluster-properties). - map properties = 2; + map properties = 2 [(google.api.field_behavior) = OPTIONAL]; - // The set of optional components to activate on the cluster. - repeated Component optional_components = 3; + // Optional. The set of components to activate on the cluster. + repeated Component optional_components = 3 [(google.api.field_behavior) = OPTIONAL]; } // Contains cluster daemon metrics, such as HDFS and YARN stats. @@ -490,13 +628,13 @@ message ClusterMetrics { message CreateClusterRequest { // Required. 
The ID of the Google Cloud Platform project that the cluster // belongs to. - string project_id = 1; + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The Cloud Dataproc region in which to handle the request. - string region = 3; + string region = 3 [(google.api.field_behavior) = REQUIRED]; // Required. The cluster to create. - Cluster cluster = 2; + Cluster cluster = 2 [(google.api.field_behavior) = REQUIRED]; // Optional. A unique id used to identify the request. If the server // receives two [CreateClusterRequest][google.cloud.dataproc.v1.CreateClusterRequest] requests with the same @@ -509,23 +647,23 @@ message CreateClusterRequest { // // The id must contain only letters (a-z, A-Z), numbers (0-9), // underscores (_), and hyphens (-). The maximum length is 40 characters. - string request_id = 4; + string request_id = 4 [(google.api.field_behavior) = OPTIONAL]; } // A request to update a cluster. message UpdateClusterRequest { // Required. The ID of the Google Cloud Platform project the // cluster belongs to. - string project_id = 1; + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The Cloud Dataproc region in which to handle the request. - string region = 5; + string region = 5 [(google.api.field_behavior) = REQUIRED]; // Required. The cluster name. - string cluster_name = 2; + string cluster_name = 2 [(google.api.field_behavior) = REQUIRED]; // Required. The changes to the cluster. - Cluster cluster = 3; + Cluster cluster = 3 [(google.api.field_behavior) = REQUIRED]; // Optional. Timeout for graceful YARN decomissioning. Graceful // decommissioning allows removing nodes from the cluster without @@ -535,7 +673,7 @@ message UpdateClusterRequest { // the maximum allowed timeout is 1 day. // // Only supported on Dataproc image versions 1.2 and higher. 
- google.protobuf.Duration graceful_decommission_timeout = 6; + google.protobuf.Duration graceful_decommission_timeout = 6 [(google.api.field_behavior) = OPTIONAL]; // Required. Specifies the path, relative to `Cluster`, of // the field to update. For example, to change the number of workers @@ -582,9 +720,13 @@ message UpdateClusterRequest { // config.secondary_worker_config.num_instances // Resize secondary worker group // + // + // config.autoscaling_config.policy_uriUse, stop using, or + // change autoscaling policies + // // // - google.protobuf.FieldMask update_mask = 4; + google.protobuf.FieldMask update_mask = 4 [(google.api.field_behavior) = REQUIRED]; // Optional. A unique id used to identify the request. If the server // receives two [UpdateClusterRequest][google.cloud.dataproc.v1.UpdateClusterRequest] requests with the same @@ -597,24 +739,24 @@ message UpdateClusterRequest { // // The id must contain only letters (a-z, A-Z), numbers (0-9), // underscores (_), and hyphens (-). The maximum length is 40 characters. - string request_id = 7; + string request_id = 7 [(google.api.field_behavior) = OPTIONAL]; } // A request to delete a cluster. message DeleteClusterRequest { // Required. The ID of the Google Cloud Platform project that the cluster // belongs to. - string project_id = 1; + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The Cloud Dataproc region in which to handle the request. - string region = 3; + string region = 3 [(google.api.field_behavior) = REQUIRED]; // Required. The cluster name. - string cluster_name = 2; + string cluster_name = 2 [(google.api.field_behavior) = REQUIRED]; // Optional. Specifying the `cluster_uuid` means the RPC should fail // (with error NOT_FOUND) if cluster with specified UUID does not exist. - string cluster_uuid = 4; + string cluster_uuid = 4 [(google.api.field_behavior) = OPTIONAL]; // Optional. A unique id used to identify the request. 
If the server // receives two [DeleteClusterRequest][google.cloud.dataproc.v1.DeleteClusterRequest] requests with the same @@ -627,30 +769,30 @@ message DeleteClusterRequest { // // The id must contain only letters (a-z, A-Z), numbers (0-9), // underscores (_), and hyphens (-). The maximum length is 40 characters. - string request_id = 5; + string request_id = 5 [(google.api.field_behavior) = OPTIONAL]; } // Request to get the resource representation for a cluster in a project. message GetClusterRequest { // Required. The ID of the Google Cloud Platform project that the cluster // belongs to. - string project_id = 1; + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The Cloud Dataproc region in which to handle the request. - string region = 3; + string region = 3 [(google.api.field_behavior) = REQUIRED]; // Required. The cluster name. - string cluster_name = 2; + string cluster_name = 2 [(google.api.field_behavior) = REQUIRED]; } // A request to list the clusters in a project. message ListClustersRequest { // Required. The ID of the Google Cloud Platform project that the cluster // belongs to. - string project_id = 1; + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The Cloud Dataproc region in which to handle the request. - string region = 4; + string region = 4 [(google.api.field_behavior) = REQUIRED]; // Optional. A filter constraining the clusters to list. Filters are // case-sensitive and have the following syntax: @@ -671,37 +813,37 @@ message ListClustersRequest { // // status.state = ACTIVE AND clusterName = mycluster // AND labels.env = staging AND labels.starred = * - string filter = 5; + string filter = 5 [(google.api.field_behavior) = OPTIONAL]; // Optional. The standard List page size. - int32 page_size = 2; + int32 page_size = 2 [(google.api.field_behavior) = OPTIONAL]; // Optional. The standard List page token. 
- string page_token = 3; + string page_token = 3 [(google.api.field_behavior) = OPTIONAL]; } // The list of all clusters in a project. message ListClustersResponse { // Output only. The clusters in the project. - repeated Cluster clusters = 1; + repeated Cluster clusters = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. This token is included in the response if there are more // results to fetch. To fetch additional results, provide this value as the // `page_token` in a subsequent `ListClustersRequest`. - string next_page_token = 2; + string next_page_token = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; } // A request to collect cluster diagnostic information. message DiagnoseClusterRequest { // Required. The ID of the Google Cloud Platform project that the cluster // belongs to. - string project_id = 1; + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The Cloud Dataproc region in which to handle the request. - string region = 3; + string region = 3 [(google.api.field_behavior) = REQUIRED]; // Required. The cluster name. - string cluster_name = 2; + string cluster_name = 2 [(google.api.field_behavior) = REQUIRED]; } // The location of diagnostic output. @@ -709,5 +851,5 @@ message DiagnoseClusterResults { // Output only. The Cloud Storage URI of the diagnostic output. // The output report is a plain text file with a summary of collected // diagnostics. 
- string output_uri = 1; + string output_uri = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; } diff --git a/dataproc/google/cloud/dataproc_v1/proto/clusters_pb2.py b/dataproc/google/cloud/dataproc_v1/proto/clusters_pb2.py index a5f3e98dd301..ca9065f58faa 100644 --- a/dataproc/google/cloud/dataproc_v1/proto/clusters_pb2.py +++ b/dataproc/google/cloud/dataproc_v1/proto/clusters_pb2.py @@ -16,6 +16,8 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.cloud.dataproc_v1.proto import ( operations_pb2 as google_dot_cloud_dot_dataproc__v1_dot_proto_dot_operations__pb2, ) @@ -38,10 +40,12 @@ "\n\034com.google.cloud.dataproc.v1B\rClustersProtoP\001Z@google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataproc" ), serialized_pb=_b( - '\n-google/cloud/dataproc_v1/proto/clusters.proto\x12\x18google.cloud.dataproc.v1\x1a\x1cgoogle/api/annotations.proto\x1a/google/cloud/dataproc_v1/proto/operations.proto\x1a+google/cloud/dataproc_v1/proto/shared.proto\x1a#google/longrunning/operations.proto\x1a\x1egoogle/protobuf/duration.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xa5\x03\n\x07\x43luster\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x14\n\x0c\x63luster_name\x18\x02 \x01(\t\x12\x37\n\x06\x63onfig\x18\x03 \x01(\x0b\x32\'.google.cloud.dataproc.v1.ClusterConfig\x12=\n\x06labels\x18\x08 \x03(\x0b\x32-.google.cloud.dataproc.v1.Cluster.LabelsEntry\x12\x37\n\x06status\x18\x04 \x01(\x0b\x32\'.google.cloud.dataproc.v1.ClusterStatus\x12?\n\x0estatus_history\x18\x07 \x03(\x0b\x32\'.google.cloud.dataproc.v1.ClusterStatus\x12\x14\n\x0c\x63luster_uuid\x18\x06 \x01(\t\x12\x39\n\x07metrics\x18\t \x01(\x0b\x32(.google.cloud.dataproc.v1.ClusterMetrics\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01"\xa8\x04\n\rClusterConfig\x12\x15\n\rconfig_bucket\x18\x01 \x01(\t\x12\x46\n\x12gce_cluster_config\x18\x08 \x01(\x0b\x32*.google.cloud.dataproc.v1.GceClusterConfig\x12\x44\n\rmaster_config\x18\t \x01(\x0b\x32-.google.cloud.dataproc.v1.InstanceGroupConfig\x12\x44\n\rworker_config\x18\n \x01(\x0b\x32-.google.cloud.dataproc.v1.InstanceGroupConfig\x12N\n\x17secondary_worker_config\x18\x0c \x01(\x0b\x32-.google.cloud.dataproc.v1.InstanceGroupConfig\x12\x41\n\x0fsoftware_config\x18\r \x01(\x0b\x32(.google.cloud.dataproc.v1.SoftwareConfig\x12R\n\x16initialization_actions\x18\x0b \x03(\x0b\x32\x32.google.cloud.dataproc.v1.NodeInitializationAction\x12\x45\n\x11\x65ncryption_config\x18\x0f \x01(\x0b\x32*.google.cloud.dataproc.v1.EncryptionConfig"/\n\x10\x45ncryptionConfig\x12\x1b\n\x13gce_pd_kms_key_name\x18\x01 \x01(\t"\xaf\x02\n\x10GceClusterConfig\x12\x10\n\x08zone_uri\x18\x01 \x01(\t\x12\x13\n\x0bnetwork_uri\x18\x02 \x01(\t\x12\x16\n\x0esubnetwork_uri\x18\x06 \x01(\t\x12\x18\n\x10internal_ip_only\x18\x07 \x01(\x08\x12\x17\n\x0fservice_account\x18\x08 \x01(\t\x12\x1e\n\x16service_account_scopes\x18\x03 \x03(\t\x12\x0c\n\x04tags\x18\x04 \x03(\t\x12J\n\x08metadata\x18\x05 \x03(\x0b\x32\x38.google.cloud.dataproc.v1.GceClusterConfig.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xd3\x02\n\x13InstanceGroupConfig\x12\x15\n\rnum_instances\x18\x01 \x01(\x05\x12\x16\n\x0einstance_names\x18\x02 \x03(\t\x12\x11\n\timage_uri\x18\x03 \x01(\t\x12\x18\n\x10machine_type_uri\x18\x04 \x01(\t\x12\x39\n\x0b\x64isk_config\x18\x05 \x01(\x0b\x32$.google.cloud.dataproc.v1.DiskConfig\x12\x16\n\x0eis_preemptible\x18\x06 \x01(\x08\x12J\n\x14managed_group_config\x18\x07 \x01(\x0b\x32,.google.cloud.dataproc.v1.ManagedGroupConfig\x12\x41\n\x0c\x61\x63\x63\x65lerators\x18\x08 \x03(\x0b\x32+.google.cloud.dataproc.v1.AcceleratorConfig"Y\n\x12ManagedGroupConfig\x12\x1e\n\x16instance_template_name\x18\x01 
\x01(\t\x12#\n\x1binstance_group_manager_name\x18\x02 \x01(\t"L\n\x11\x41\x63\x63\x65leratorConfig\x12\x1c\n\x14\x61\x63\x63\x65lerator_type_uri\x18\x01 \x01(\t\x12\x19\n\x11\x61\x63\x63\x65lerator_count\x18\x02 \x01(\x05"W\n\nDiskConfig\x12\x16\n\x0e\x62oot_disk_type\x18\x03 \x01(\t\x12\x19\n\x11\x62oot_disk_size_gb\x18\x01 \x01(\x05\x12\x16\n\x0enum_local_ssds\x18\x02 \x01(\x05"i\n\x18NodeInitializationAction\x12\x17\n\x0f\x65xecutable_file\x18\x01 \x01(\t\x12\x34\n\x11\x65xecution_timeout\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration"\xed\x02\n\rClusterStatus\x12<\n\x05state\x18\x01 \x01(\x0e\x32-.google.cloud.dataproc.v1.ClusterStatus.State\x12\x0e\n\x06\x64\x65tail\x18\x02 \x01(\t\x12\x34\n\x10state_start_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x42\n\x08substate\x18\x04 \x01(\x0e\x32\x30.google.cloud.dataproc.v1.ClusterStatus.Substate"V\n\x05State\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0c\n\x08\x43REATING\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\x12\t\n\x05\x45RROR\x10\x03\x12\x0c\n\x08\x44\x45LETING\x10\x04\x12\x0c\n\x08UPDATING\x10\x05"<\n\x08Substate\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\r\n\tUNHEALTHY\x10\x01\x12\x10\n\x0cSTALE_STATUS\x10\x02"\xea\x01\n\x0eSoftwareConfig\x12\x15\n\rimage_version\x18\x01 \x01(\t\x12L\n\nproperties\x18\x02 \x03(\x0b\x32\x38.google.cloud.dataproc.v1.SoftwareConfig.PropertiesEntry\x12@\n\x13optional_components\x18\x03 \x03(\x0e\x32#.google.cloud.dataproc.v1.Component\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x9a\x02\n\x0e\x43lusterMetrics\x12O\n\x0chdfs_metrics\x18\x01 \x03(\x0b\x32\x39.google.cloud.dataproc.v1.ClusterMetrics.HdfsMetricsEntry\x12O\n\x0cyarn_metrics\x18\x02 \x03(\x0b\x32\x39.google.cloud.dataproc.v1.ClusterMetrics.YarnMetricsEntry\x1a\x32\n\x10HdfsMetricsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x03:\x02\x38\x01\x1a\x32\n\x10YarnMetricsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\x03:\x02\x38\x01"\x82\x01\n\x14\x43reateClusterRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06region\x18\x03 \x01(\t\x12\x32\n\x07\x63luster\x18\x02 \x01(\x0b\x32!.google.cloud.dataproc.v1.Cluster\x12\x12\n\nrequest_id\x18\x04 \x01(\t"\x8b\x02\n\x14UpdateClusterRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06region\x18\x05 \x01(\t\x12\x14\n\x0c\x63luster_name\x18\x02 \x01(\t\x12\x32\n\x07\x63luster\x18\x03 \x01(\x0b\x32!.google.cloud.dataproc.v1.Cluster\x12@\n\x1dgraceful_decommission_timeout\x18\x06 \x01(\x0b\x32\x19.google.protobuf.Duration\x12/\n\x0bupdate_mask\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\x12\x12\n\nrequest_id\x18\x07 \x01(\t"z\n\x14\x44\x65leteClusterRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06region\x18\x03 \x01(\t\x12\x14\n\x0c\x63luster_name\x18\x02 \x01(\t\x12\x14\n\x0c\x63luster_uuid\x18\x04 \x01(\t\x12\x12\n\nrequest_id\x18\x05 \x01(\t"M\n\x11GetClusterRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06region\x18\x03 \x01(\t\x12\x14\n\x0c\x63luster_name\x18\x02 \x01(\t"p\n\x13ListClustersRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06region\x18\x04 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x05 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"d\n\x14ListClustersResponse\x12\x33\n\x08\x63lusters\x18\x01 \x03(\x0b\x32!.google.cloud.dataproc.v1.Cluster\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"R\n\x16\x44iagnoseClusterRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06region\x18\x03 \x01(\t\x12\x14\n\x0c\x63luster_name\x18\x02 \x01(\t",\n\x16\x44iagnoseClusterResults\x12\x12\n\noutput_uri\x18\x01 
\x01(\t2\xb2\x08\n\x11\x43lusterController\x12\xa4\x01\n\rCreateCluster\x12..google.cloud.dataproc.v1.CreateClusterRequest\x1a\x1d.google.longrunning.Operation"D\x82\xd3\xe4\x93\x02>"3/v1/projects/{project_id}/regions/{region}/clusters:\x07\x63luster\x12\xb3\x01\n\rUpdateCluster\x12..google.cloud.dataproc.v1.UpdateClusterRequest\x1a\x1d.google.longrunning.Operation"S\x82\xd3\xe4\x93\x02M2B/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}:\x07\x63luster\x12\xaa\x01\n\rDeleteCluster\x12..google.cloud.dataproc.v1.DeleteClusterRequest\x1a\x1d.google.longrunning.Operation"J\x82\xd3\xe4\x93\x02\x44*B/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}\x12\xa8\x01\n\nGetCluster\x12+.google.cloud.dataproc.v1.GetClusterRequest\x1a!.google.cloud.dataproc.v1.Cluster"J\x82\xd3\xe4\x93\x02\x44\x12\x42/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}\x12\xaa\x01\n\x0cListClusters\x12-.google.cloud.dataproc.v1.ListClustersRequest\x1a..google.cloud.dataproc.v1.ListClustersResponse";\x82\xd3\xe4\x93\x02\x35\x12\x33/v1/projects/{project_id}/regions/{region}/clusters\x12\xba\x01\n\x0f\x44iagnoseCluster\x12\x30.google.cloud.dataproc.v1.DiagnoseClusterRequest\x1a\x1d.google.longrunning.Operation"V\x82\xd3\xe4\x93\x02P"K/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}:diagnose:\x01*Bq\n\x1c\x63om.google.cloud.dataproc.v1B\rClustersProtoP\x01Z@google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataprocb\x06proto3' + '\n-google/cloud/dataproc_v1/proto/clusters.proto\x12\x18google.cloud.dataproc.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a/google/cloud/dataproc_v1/proto/operations.proto\x1a+google/cloud/dataproc_v1/proto/shared.proto\x1a#google/longrunning/operations.proto\x1a\x1egoogle/protobuf/duration.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xb9\x03\n\x07\x43luster\x12\x12\n\nproject_id\x18\x01 
\x01(\t\x12\x14\n\x0c\x63luster_name\x18\x02 \x01(\t\x12\x37\n\x06\x63onfig\x18\x03 \x01(\x0b\x32\'.google.cloud.dataproc.v1.ClusterConfig\x12\x42\n\x06labels\x18\x08 \x03(\x0b\x32-.google.cloud.dataproc.v1.Cluster.LabelsEntryB\x03\xe0\x41\x01\x12<\n\x06status\x18\x04 \x01(\x0b\x32\'.google.cloud.dataproc.v1.ClusterStatusB\x03\xe0\x41\x03\x12\x44\n\x0estatus_history\x18\x07 \x03(\x0b\x32\'.google.cloud.dataproc.v1.ClusterStatusB\x03\xe0\x41\x03\x12\x19\n\x0c\x63luster_uuid\x18\x06 \x01(\tB\x03\xe0\x41\x03\x12\x39\n\x07metrics\x18\t \x01(\x0b\x32(.google.cloud.dataproc.v1.ClusterMetrics\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xe6\x05\n\rClusterConfig\x12\x1a\n\rconfig_bucket\x18\x01 \x01(\tB\x03\xe0\x41\x01\x12K\n\x12gce_cluster_config\x18\x08 \x01(\x0b\x32*.google.cloud.dataproc.v1.GceClusterConfigB\x03\xe0\x41\x01\x12I\n\rmaster_config\x18\t \x01(\x0b\x32-.google.cloud.dataproc.v1.InstanceGroupConfigB\x03\xe0\x41\x01\x12I\n\rworker_config\x18\n \x01(\x0b\x32-.google.cloud.dataproc.v1.InstanceGroupConfigB\x03\xe0\x41\x01\x12S\n\x17secondary_worker_config\x18\x0c \x01(\x0b\x32-.google.cloud.dataproc.v1.InstanceGroupConfigB\x03\xe0\x41\x01\x12\x46\n\x0fsoftware_config\x18\r \x01(\x0b\x32(.google.cloud.dataproc.v1.SoftwareConfigB\x03\xe0\x41\x01\x12W\n\x16initialization_actions\x18\x0b \x03(\x0b\x32\x32.google.cloud.dataproc.v1.NodeInitializationActionB\x03\xe0\x41\x01\x12J\n\x11\x65ncryption_config\x18\x0f \x01(\x0b\x32*.google.cloud.dataproc.v1.EncryptionConfigB\x03\xe0\x41\x01\x12L\n\x12\x61utoscaling_config\x18\x12 \x01(\x0b\x32+.google.cloud.dataproc.v1.AutoscalingConfigB\x03\xe0\x41\x01\x12\x46\n\x0fsecurity_config\x18\x10 \x01(\x0b\x32(.google.cloud.dataproc.v1.SecurityConfigB\x03\xe0\x41\x01",\n\x11\x41utoscalingConfig\x12\x17\n\npolicy_uri\x18\x01 \x01(\tB\x03\xe0\x41\x01"4\n\x10\x45ncryptionConfig\x12 \n\x13gce_pd_kms_key_name\x18\x01 
\x01(\tB\x03\xe0\x41\x01"\xcd\x02\n\x10GceClusterConfig\x12\x15\n\x08zone_uri\x18\x01 \x01(\tB\x03\xe0\x41\x01\x12\x18\n\x0bnetwork_uri\x18\x02 \x01(\tB\x03\xe0\x41\x01\x12\x1b\n\x0esubnetwork_uri\x18\x06 \x01(\tB\x03\xe0\x41\x01\x12\x1d\n\x10internal_ip_only\x18\x07 \x01(\x08\x42\x03\xe0\x41\x01\x12\x1c\n\x0fservice_account\x18\x08 \x01(\tB\x03\xe0\x41\x01\x12#\n\x16service_account_scopes\x18\x03 \x03(\tB\x03\xe0\x41\x01\x12\x0c\n\x04tags\x18\x04 \x03(\t\x12J\n\x08metadata\x18\x05 \x03(\x0b\x32\x38.google.cloud.dataproc.v1.GceClusterConfig.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x9a\x03\n\x13InstanceGroupConfig\x12\x1a\n\rnum_instances\x18\x01 \x01(\x05\x42\x03\xe0\x41\x01\x12\x1b\n\x0einstance_names\x18\x02 \x03(\tB\x03\xe0\x41\x03\x12\x16\n\timage_uri\x18\x03 \x01(\tB\x03\xe0\x41\x01\x12\x1d\n\x10machine_type_uri\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12>\n\x0b\x64isk_config\x18\x05 \x01(\x0b\x32$.google.cloud.dataproc.v1.DiskConfigB\x03\xe0\x41\x01\x12\x1b\n\x0eis_preemptible\x18\x06 \x01(\x08\x42\x03\xe0\x41\x01\x12O\n\x14managed_group_config\x18\x07 \x01(\x0b\x32,.google.cloud.dataproc.v1.ManagedGroupConfigB\x03\xe0\x41\x03\x12\x46\n\x0c\x61\x63\x63\x65lerators\x18\x08 \x03(\x0b\x32+.google.cloud.dataproc.v1.AcceleratorConfigB\x03\xe0\x41\x01\x12\x1d\n\x10min_cpu_platform\x18\t \x01(\tB\x03\xe0\x41\x01"c\n\x12ManagedGroupConfig\x12#\n\x16instance_template_name\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12(\n\x1binstance_group_manager_name\x18\x02 \x01(\tB\x03\xe0\x41\x03"L\n\x11\x41\x63\x63\x65leratorConfig\x12\x1c\n\x14\x61\x63\x63\x65lerator_type_uri\x18\x01 \x01(\t\x12\x19\n\x11\x61\x63\x63\x65lerator_count\x18\x02 \x01(\x05"f\n\nDiskConfig\x12\x1b\n\x0e\x62oot_disk_type\x18\x03 \x01(\tB\x03\xe0\x41\x01\x12\x1e\n\x11\x62oot_disk_size_gb\x18\x01 \x01(\x05\x42\x03\xe0\x41\x01\x12\x1b\n\x0enum_local_ssds\x18\x02 
\x01(\x05\x42\x03\xe0\x41\x01"s\n\x18NodeInitializationAction\x12\x1c\n\x0f\x65xecutable_file\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x39\n\x11\x65xecution_timeout\x18\x02 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x01"\x84\x03\n\rClusterStatus\x12\x41\n\x05state\x18\x01 \x01(\x0e\x32-.google.cloud.dataproc.v1.ClusterStatus.StateB\x03\xe0\x41\x03\x12\x16\n\x06\x64\x65tail\x18\x02 \x01(\tB\x06\xe0\x41\x03\xe0\x41\x01\x12\x39\n\x10state_start_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12G\n\x08substate\x18\x04 \x01(\x0e\x32\x30.google.cloud.dataproc.v1.ClusterStatus.SubstateB\x03\xe0\x41\x03"V\n\x05State\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0c\n\x08\x43REATING\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\x12\t\n\x05\x45RROR\x10\x03\x12\x0c\n\x08\x44\x45LETING\x10\x04\x12\x0c\n\x08UPDATING\x10\x05"<\n\x08Substate\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\r\n\tUNHEALTHY\x10\x01\x12\x10\n\x0cSTALE_STATUS\x10\x02"S\n\x0eSecurityConfig\x12\x41\n\x0fkerberos_config\x18\x01 \x01(\x0b\x32(.google.cloud.dataproc.v1.KerberosConfig"\x90\x04\n\x0eKerberosConfig\x12\x1c\n\x0f\x65nable_kerberos\x18\x01 \x01(\x08\x42\x03\xe0\x41\x01\x12(\n\x1broot_principal_password_uri\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x18\n\x0bkms_key_uri\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0ckeystore_uri\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12\x1b\n\x0etruststore_uri\x18\x05 \x01(\tB\x03\xe0\x41\x01\x12"\n\x15keystore_password_uri\x18\x06 \x01(\tB\x03\xe0\x41\x01\x12\x1d\n\x10key_password_uri\x18\x07 \x01(\tB\x03\xe0\x41\x01\x12$\n\x17truststore_password_uri\x18\x08 \x01(\tB\x03\xe0\x41\x01\x12$\n\x17\x63ross_realm_trust_realm\x18\t \x01(\tB\x03\xe0\x41\x01\x12"\n\x15\x63ross_realm_trust_kdc\x18\n \x01(\tB\x03\xe0\x41\x01\x12+\n\x1e\x63ross_realm_trust_admin_server\x18\x0b \x01(\tB\x03\xe0\x41\x01\x12\x32\n%cross_realm_trust_shared_password_uri\x18\x0c \x01(\tB\x03\xe0\x41\x01\x12\x1b\n\x0ekdc_db_key_uri\x18\r \x01(\tB\x03\xe0\x41\x01\x12\x1f\n\x12tgt_lifetime_hours\x18\x0e 
\x01(\x05\x42\x03\xe0\x41\x01\x12\x12\n\x05realm\x18\x0f \x01(\tB\x03\xe0\x41\x01"\xf9\x01\n\x0eSoftwareConfig\x12\x1a\n\rimage_version\x18\x01 \x01(\tB\x03\xe0\x41\x01\x12Q\n\nproperties\x18\x02 \x03(\x0b\x32\x38.google.cloud.dataproc.v1.SoftwareConfig.PropertiesEntryB\x03\xe0\x41\x01\x12\x45\n\x13optional_components\x18\x03 \x03(\x0e\x32#.google.cloud.dataproc.v1.ComponentB\x03\xe0\x41\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x9a\x02\n\x0e\x43lusterMetrics\x12O\n\x0chdfs_metrics\x18\x01 \x03(\x0b\x32\x39.google.cloud.dataproc.v1.ClusterMetrics.HdfsMetricsEntry\x12O\n\x0cyarn_metrics\x18\x02 \x03(\x0b\x32\x39.google.cloud.dataproc.v1.ClusterMetrics.YarnMetricsEntry\x1a\x32\n\x10HdfsMetricsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x03:\x02\x38\x01\x1a\x32\n\x10YarnMetricsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x03:\x02\x38\x01"\x96\x01\n\x14\x43reateClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x37\n\x07\x63luster\x18\x02 \x01(\x0b\x32!.google.cloud.dataproc.v1.ClusterB\x03\xe0\x41\x02\x12\x17\n\nrequest_id\x18\x04 \x01(\tB\x03\xe0\x41\x01"\xae\x02\n\x14UpdateClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x05 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x37\n\x07\x63luster\x18\x03 \x01(\x0b\x32!.google.cloud.dataproc.v1.ClusterB\x03\xe0\x41\x02\x12\x45\n\x1dgraceful_decommission_timeout\x18\x06 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x01\x12\x34\n\x0bupdate_mask\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02\x12\x17\n\nrequest_id\x18\x07 \x01(\tB\x03\xe0\x41\x01"\x93\x01\n\x14\x44\x65leteClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 
\x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_uuid\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12\x17\n\nrequest_id\x18\x05 \x01(\tB\x03\xe0\x41\x01"\\\n\x11GetClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02"\x89\x01\n\x13ListClustersRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x04 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06\x66ilter\x18\x05 \x01(\tB\x03\xe0\x41\x01\x12\x16\n\tpage_size\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01\x12\x17\n\npage_token\x18\x03 \x01(\tB\x03\xe0\x41\x01"n\n\x14ListClustersResponse\x12\x38\n\x08\x63lusters\x18\x01 \x03(\x0b\x32!.google.cloud.dataproc.v1.ClusterB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03"a\n\x16\x44iagnoseClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02"1\n\x16\x44iagnoseClusterResults\x12\x17\n\noutput_uri\x18\x01 
\x01(\tB\x03\xe0\x41\x03\x32\xae\x0c\n\x11\x43lusterController\x12\x80\x02\n\rCreateCluster\x12..google.cloud.dataproc.v1.CreateClusterRequest\x1a\x1d.google.longrunning.Operation"\x9f\x01\x82\xd3\xe4\x93\x02>"3/v1/projects/{project_id}/regions/{region}/clusters:\x07\x63luster\xda\x41\x19project_id,region,cluster\xca\x41<\n\x07\x43luster\x12\x31google.cloud.dataproc.v1.ClusterOperationMetadata\x12\xf3\x01\n\rUpdateCluster\x12..google.cloud.dataproc.v1.UpdateClusterRequest\x1a\x1d.google.longrunning.Operation"\x92\x01\x82\xd3\xe4\x93\x02M2B/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}:\x07\x63luster\xca\x41<\n\x07\x43luster\x12\x31google.cloud.dataproc.v1.ClusterOperationMetadata\x12\x99\x02\n\rDeleteCluster\x12..google.cloud.dataproc.v1.DeleteClusterRequest\x1a\x1d.google.longrunning.Operation"\xb8\x01\x82\xd3\xe4\x93\x02\x44*B/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}\xda\x41\x1eproject_id,region,cluster_name\xca\x41J\n\x15google.protobuf.Empty\x12\x31google.cloud.dataproc.v1.ClusterOperationMetadata\x12\xc9\x01\n\nGetCluster\x12+.google.cloud.dataproc.v1.GetClusterRequest\x1a!.google.cloud.dataproc.v1.Cluster"k\x82\xd3\xe4\x93\x02\x44\x12\x42/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}\xda\x41\x1eproject_id,region,cluster_name\x12\xd9\x01\n\x0cListClusters\x12-.google.cloud.dataproc.v1.ListClustersRequest\x1a..google.cloud.dataproc.v1.ListClustersResponse"j\x82\xd3\xe4\x93\x02\x35\x12\x33/v1/projects/{project_id}/regions/{region}/clusters\xda\x41\x11project_id,region\xda\x41\x18project_id,region,filter\x12\x8e\x02\n\x0f\x44iagnoseCluster\x12\x30.google.cloud.dataproc.v1.DiagnoseClusterRequest\x1a\x1d.google.longrunning.Operation"\xa9\x01\x82\xd3\xe4\x93\x02P"K/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}:diagnose:\x01*\xda\x41\x1eproject_id,region,cluster_name\xca\x41/\n\x15google.protobuf.Empty\x12\x16\x44iagnoseClusterResults\x1aK\xca\x41\x17\x64\x61taproc.googleapis.com\xd2\
x41.https://www.googleapis.com/auth/cloud-platformBq\n\x1c\x63om.google.cloud.dataproc.v1B\rClustersProtoP\x01Z@google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataprocb\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, google_dot_cloud_dot_dataproc__v1_dot_proto_dot_operations__pb2.DESCRIPTOR, google_dot_cloud_dot_dataproc__v1_dot_proto_dot_shared__pb2.DESCRIPTOR, google_dot_longrunning_dot_operations__pb2.DESCRIPTOR, @@ -79,8 +83,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=2594, - serialized_end=2680, + serialized_start=3072, + serialized_end=3158, ) _sym_db.RegisterEnumDescriptor(_CLUSTERSTATUS_STATE) @@ -102,8 +106,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=2682, - serialized_end=2742, + serialized_start=3160, + serialized_end=3220, ) _sym_db.RegisterEnumDescriptor(_CLUSTERSTATUS_SUBSTATE) @@ -160,8 +164,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=712, - serialized_end=757, + serialized_start=790, + serialized_end=835, ) _CLUSTER = _descriptor.Descriptor( @@ -240,7 +244,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -258,7 +262,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -276,7 +280,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -294,7 +298,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -324,8 
+328,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=336, - serialized_end=757, + serialized_start=394, + serialized_end=835, ) @@ -351,7 +355,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -369,7 +373,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -387,7 +391,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -405,7 +409,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -423,7 +427,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -441,7 +445,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -459,7 +463,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -477,7 +481,43 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="autoscaling_config", + full_name="google.cloud.dataproc.v1.ClusterConfig.autoscaling_config", + index=8, + number=18, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + 
containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="security_config", + full_name="google.cloud.dataproc.v1.ClusterConfig.security_config", + index=9, + number=16, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -489,8 +529,47 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=760, - serialized_end=1312, + serialized_start=838, + serialized_end=1580, +) + + +_AUTOSCALINGCONFIG = _descriptor.Descriptor( + name="AutoscalingConfig", + full_name="google.cloud.dataproc.v1.AutoscalingConfig", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="policy_uri", + full_name="google.cloud.dataproc.v1.AutoscalingConfig.policy_uri", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1582, + serialized_end=1626, ) @@ -516,7 +595,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ) ], @@ -528,8 +607,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1314, - serialized_end=1361, + serialized_start=1628, + serialized_end=1680, ) @@ -585,8 +664,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1620, - serialized_end=1667, + 
serialized_start=1969, + serialized_end=2016, ) _GCECLUSTERCONFIG = _descriptor.Descriptor( @@ -611,7 +690,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -629,7 +708,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -647,7 +726,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -665,7 +744,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -683,7 +762,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -701,7 +780,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -749,8 +828,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1364, - serialized_end=1667, + serialized_start=1683, + serialized_end=2016, ) @@ -776,7 +855,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -794,7 +873,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -812,7 +891,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), 
file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -830,7 +909,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -848,7 +927,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -866,7 +945,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -884,7 +963,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -902,7 +981,25 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="min_cpu_platform", + full_name="google.cloud.dataproc.v1.InstanceGroupConfig.min_cpu_platform", + index=8, + number=9, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -914,8 +1011,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1670, - serialized_end=2009, + serialized_start=2019, + serialized_end=2429, ) @@ -941,7 +1038,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -959,7 +1056,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -971,8 
+1068,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2011, - serialized_end=2100, + serialized_start=2431, + serialized_end=2530, ) @@ -1028,8 +1125,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2102, - serialized_end=2178, + serialized_start=2532, + serialized_end=2608, ) @@ -1055,7 +1152,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1073,7 +1170,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1091,7 +1188,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -1103,8 +1200,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2180, - serialized_end=2267, + serialized_start=2610, + serialized_end=2712, ) @@ -1130,7 +1227,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1148,7 +1245,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -1160,8 +1257,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2269, - serialized_end=2374, + serialized_start=2714, + serialized_end=2829, ) @@ -1187,7 +1284,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1205,7 +1302,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003\340A\001"), file=DESCRIPTOR, ), 
_descriptor.FieldDescriptor( @@ -1223,7 +1320,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1241,7 +1338,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -1253,8 +1350,338 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2377, - serialized_end=2742, + serialized_start=2832, + serialized_end=3220, +) + + +_SECURITYCONFIG = _descriptor.Descriptor( + name="SecurityConfig", + full_name="google.cloud.dataproc.v1.SecurityConfig", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="kerberos_config", + full_name="google.cloud.dataproc.v1.SecurityConfig.kerberos_config", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=3222, + serialized_end=3305, +) + + +_KERBEROSCONFIG = _descriptor.Descriptor( + name="KerberosConfig", + full_name="google.cloud.dataproc.v1.KerberosConfig", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="enable_kerberos", + full_name="google.cloud.dataproc.v1.KerberosConfig.enable_kerberos", + index=0, + number=1, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + 
_descriptor.FieldDescriptor( + name="root_principal_password_uri", + full_name="google.cloud.dataproc.v1.KerberosConfig.root_principal_password_uri", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="kms_key_uri", + full_name="google.cloud.dataproc.v1.KerberosConfig.kms_key_uri", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="keystore_uri", + full_name="google.cloud.dataproc.v1.KerberosConfig.keystore_uri", + index=3, + number=4, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="truststore_uri", + full_name="google.cloud.dataproc.v1.KerberosConfig.truststore_uri", + index=4, + number=5, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="keystore_password_uri", + full_name="google.cloud.dataproc.v1.KerberosConfig.keystore_password_uri", + index=5, + number=6, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + 
enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="key_password_uri", + full_name="google.cloud.dataproc.v1.KerberosConfig.key_password_uri", + index=6, + number=7, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="truststore_password_uri", + full_name="google.cloud.dataproc.v1.KerberosConfig.truststore_password_uri", + index=7, + number=8, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="cross_realm_trust_realm", + full_name="google.cloud.dataproc.v1.KerberosConfig.cross_realm_trust_realm", + index=8, + number=9, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="cross_realm_trust_kdc", + full_name="google.cloud.dataproc.v1.KerberosConfig.cross_realm_trust_kdc", + index=9, + number=10, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="cross_realm_trust_admin_server", + 
full_name="google.cloud.dataproc.v1.KerberosConfig.cross_realm_trust_admin_server", + index=10, + number=11, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="cross_realm_trust_shared_password_uri", + full_name="google.cloud.dataproc.v1.KerberosConfig.cross_realm_trust_shared_password_uri", + index=11, + number=12, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="kdc_db_key_uri", + full_name="google.cloud.dataproc.v1.KerberosConfig.kdc_db_key_uri", + index=12, + number=13, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="tgt_lifetime_hours", + full_name="google.cloud.dataproc.v1.KerberosConfig.tgt_lifetime_hours", + index=13, + number=14, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="realm", + full_name="google.cloud.dataproc.v1.KerberosConfig.realm", + index=14, + number=15, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + 
is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=3308, + serialized_end=3836, ) @@ -1310,8 +1737,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2930, - serialized_end=2979, + serialized_start=4039, + serialized_end=4088, ) _SOFTWARECONFIG = _descriptor.Descriptor( @@ -1336,7 +1763,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1354,7 +1781,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1372,7 +1799,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -1384,8 +1811,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2745, - serialized_end=2979, + serialized_start=3839, + serialized_end=4088, ) @@ -1441,8 +1868,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3162, - serialized_end=3212, + serialized_start=4271, + serialized_end=4321, ) _CLUSTERMETRICS_YARNMETRICSENTRY = _descriptor.Descriptor( @@ -1497,8 +1924,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3214, - serialized_end=3264, + serialized_start=4323, + serialized_end=4373, ) _CLUSTERMETRICS = _descriptor.Descriptor( @@ -1553,8 +1980,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2982, - serialized_end=3264, + serialized_start=4091, + serialized_end=4373, ) @@ -1580,7 +2007,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + 
serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1598,7 +2025,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1616,7 +2043,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1634,7 +2061,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -1646,8 +2073,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3267, - serialized_end=3397, + serialized_start=4376, + serialized_end=4526, ) @@ -1673,7 +2100,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1691,7 +2118,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1709,7 +2136,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1727,7 +2154,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1745,7 +2172,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1763,7 +2190,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), 
_descriptor.FieldDescriptor( @@ -1781,7 +2208,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -1793,8 +2220,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3400, - serialized_end=3667, + serialized_start=4529, + serialized_end=4831, ) @@ -1820,7 +2247,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1838,7 +2265,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1856,7 +2283,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1874,7 +2301,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1892,7 +2319,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -1904,8 +2331,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3669, - serialized_end=3791, + serialized_start=4834, + serialized_end=4981, ) @@ -1931,7 +2358,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1949,7 +2376,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1967,7 +2394,7 @@ containing_type=None, is_extension=False, 
extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -1979,8 +2406,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3793, - serialized_end=3870, + serialized_start=4983, + serialized_end=5075, ) @@ -2006,7 +2433,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2024,7 +2451,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2042,7 +2469,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2060,7 +2487,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2078,7 +2505,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -2090,8 +2517,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3872, - serialized_end=3984, + serialized_start=5078, + serialized_end=5215, ) @@ -2117,7 +2544,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2135,7 +2562,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -2147,8 +2574,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3986, - serialized_end=4086, + serialized_start=5217, + serialized_end=5327, ) @@ -2174,7 +2601,7 @@ 
containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2192,7 +2619,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2210,7 +2637,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -2222,8 +2649,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4088, - serialized_end=4170, + serialized_start=5329, + serialized_end=5426, ) @@ -2249,7 +2676,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ) ], @@ -2261,8 +2688,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4172, - serialized_end=4216, + serialized_start=5428, + serialized_end=5477, ) _CLUSTER_LABELSENTRY.containing_type = _CLUSTER @@ -2282,6 +2709,8 @@ "initialization_actions" ].message_type = _NODEINITIALIZATIONACTION _CLUSTERCONFIG.fields_by_name["encryption_config"].message_type = _ENCRYPTIONCONFIG +_CLUSTERCONFIG.fields_by_name["autoscaling_config"].message_type = _AUTOSCALINGCONFIG +_CLUSTERCONFIG.fields_by_name["security_config"].message_type = _SECURITYCONFIG _GCECLUSTERCONFIG_METADATAENTRY.containing_type = _GCECLUSTERCONFIG _GCECLUSTERCONFIG.fields_by_name[ "metadata" @@ -2301,6 +2730,7 @@ _CLUSTERSTATUS.fields_by_name["substate"].enum_type = _CLUSTERSTATUS_SUBSTATE _CLUSTERSTATUS_STATE.containing_type = _CLUSTERSTATUS _CLUSTERSTATUS_SUBSTATE.containing_type = _CLUSTERSTATUS +_SECURITYCONFIG.fields_by_name["kerberos_config"].message_type = _KERBEROSCONFIG _SOFTWARECONFIG_PROPERTIESENTRY.containing_type = _SOFTWARECONFIG _SOFTWARECONFIG.fields_by_name[ "properties" @@ -2327,6 +2757,7 @@ 
_LISTCLUSTERSRESPONSE.fields_by_name["clusters"].message_type = _CLUSTER DESCRIPTOR.message_types_by_name["Cluster"] = _CLUSTER DESCRIPTOR.message_types_by_name["ClusterConfig"] = _CLUSTERCONFIG +DESCRIPTOR.message_types_by_name["AutoscalingConfig"] = _AUTOSCALINGCONFIG DESCRIPTOR.message_types_by_name["EncryptionConfig"] = _ENCRYPTIONCONFIG DESCRIPTOR.message_types_by_name["GceClusterConfig"] = _GCECLUSTERCONFIG DESCRIPTOR.message_types_by_name["InstanceGroupConfig"] = _INSTANCEGROUPCONFIG @@ -2335,6 +2766,8 @@ DESCRIPTOR.message_types_by_name["DiskConfig"] = _DISKCONFIG DESCRIPTOR.message_types_by_name["NodeInitializationAction"] = _NODEINITIALIZATIONACTION DESCRIPTOR.message_types_by_name["ClusterStatus"] = _CLUSTERSTATUS +DESCRIPTOR.message_types_by_name["SecurityConfig"] = _SECURITYCONFIG +DESCRIPTOR.message_types_by_name["KerberosConfig"] = _KERBEROSCONFIG DESCRIPTOR.message_types_by_name["SoftwareConfig"] = _SOFTWARECONFIG DESCRIPTOR.message_types_by_name["ClusterMetrics"] = _CLUSTERMETRICS DESCRIPTOR.message_types_by_name["CreateClusterRequest"] = _CREATECLUSTERREQUEST @@ -2450,12 +2883,42 @@ else ... worker specific actions ... fi encryption_config: Optional. Encryption settings for the cluster. + autoscaling_config: + Optional. Autoscaling config for the policy associated with + the cluster. Cluster does not autoscale if this field is + unset. + security_config: + Optional. Security settings for the cluster. """, # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.ClusterConfig) ), ) _sym_db.RegisterMessage(ClusterConfig) +AutoscalingConfig = _reflection.GeneratedProtocolMessageType( + "AutoscalingConfig", + (_message.Message,), + dict( + DESCRIPTOR=_AUTOSCALINGCONFIG, + __module__="google.cloud.dataproc_v1.proto.clusters_pb2", + __doc__="""Autoscaling Policy config associated with the cluster. + + + Attributes: + policy_uri: + Optional. The autoscaling policy used by the cluster. 
Only + resource names including projectid and location (region) are + valid. Examples: - ``https://www.googleapis.com/compute/v1/p + rojects/[project_id]/locations/[dataproc_region]/autoscalingPo + licies/[policy_id]`` - ``projects/[project_id]/locations/[dat + aproc_region]/autoscalingPolicies/[policy_id]`` Note that the + policy must be in the same project and Cloud Dataproc region. + """, + # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.AutoscalingConfig) + ), +) +_sym_db.RegisterMessage(AutoscalingConfig) + EncryptionConfig = _reflection.GeneratedProtocolMessageType( "EncryptionConfig", (_message.Message,), @@ -2614,8 +3077,11 @@ preemptible instance groups. accelerators: Optional. The Compute Engine accelerator configuration for - these instances. **Beta Feature**: This feature is still - under development. It may be changed before final release. + these instances. + min_cpu_platform: + Optional. Specifies the minimum cpu platform for the Instance + Group. See [Cloud Dataproc→Minimum CPU Platform] + (/dataproc/docs/concepts/compute/dataproc-min-cpu). """, # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.InstanceGroupConfig) ), @@ -2748,7 +3214,7 @@ state: Output only. The cluster's state. detail: - Output only. Optional details of cluster's state. + Optional. Output only. Details of cluster's state. state_start_time: Output only. Time when this state was entered. substate: @@ -2760,6 +3226,96 @@ ) _sym_db.RegisterMessage(ClusterStatus) +SecurityConfig = _reflection.GeneratedProtocolMessageType( + "SecurityConfig", + (_message.Message,), + dict( + DESCRIPTOR=_SECURITYCONFIG, + __module__="google.cloud.dataproc_v1.proto.clusters_pb2", + __doc__="""Security related configuration, including Kerberos. + + + Attributes: + kerberos_config: + Kerberos related configuration. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.SecurityConfig) + ), +) +_sym_db.RegisterMessage(SecurityConfig) + +KerberosConfig = _reflection.GeneratedProtocolMessageType( + "KerberosConfig", + (_message.Message,), + dict( + DESCRIPTOR=_KERBEROSCONFIG, + __module__="google.cloud.dataproc_v1.proto.clusters_pb2", + __doc__="""Specifies Kerberos related configuration. + + + Attributes: + enable_kerberos: + Optional. Flag to indicate whether to Kerberize the cluster. + root_principal_password_uri: + Required. The Cloud Storage URI of a KMS encrypted file + containing the root principal password. + kms_key_uri: + Required. The uri of the KMS key used to encrypt various + sensitive files. + keystore_uri: + Optional. The Cloud Storage URI of the keystore file used for + SSL encryption. If not provided, Dataproc will provide a self- + signed certificate. + truststore_uri: + Optional. The Cloud Storage URI of the truststore file used + for SSL encryption. If not provided, Dataproc will provide a + self-signed certificate. + keystore_password_uri: + Optional. The Cloud Storage URI of a KMS encrypted file + containing the password to the user provided keystore. For the + self-signed certificate, this password is generated by + Dataproc. + key_password_uri: + Optional. The Cloud Storage URI of a KMS encrypted file + containing the password to the user provided key. For the + self-signed certificate, this password is generated by + Dataproc. + truststore_password_uri: + Optional. The Cloud Storage URI of a KMS encrypted file + containing the password to the user provided truststore. For + the self-signed certificate, this password is generated by + Dataproc. + cross_realm_trust_realm: + Optional. The remote realm the Dataproc on-cluster KDC will + trust, should the user enable cross realm trust. + cross_realm_trust_kdc: + Optional. The KDC (IP or hostname) for the remote trusted + realm in a cross realm trust relationship. 
+ cross_realm_trust_admin_server: + Optional. The admin server (IP or hostname) for the remote + trusted realm in a cross realm trust relationship. + cross_realm_trust_shared_password_uri: + Optional. The Cloud Storage URI of a KMS encrypted file + containing the shared password between the on-cluster Kerberos + realm and the remote trusted realm, in a cross realm trust + relationship. + kdc_db_key_uri: + Optional. The Cloud Storage URI of a KMS encrypted file + containing the master key of the KDC database. + tgt_lifetime_hours: + Optional. The lifetime of the ticket granting ticket, in + hours. If not specified, or user specifies 0, then default + value 10 will be used. + realm: + Optional. The name of the on-cluster Kerberos realm. If not + specified, the uppercased domain of hostnames will be the + realm. + """, + # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.KerberosConfig) + ), +) +_sym_db.RegisterMessage(KerberosConfig) + SoftwareConfig = _reflection.GeneratedProtocolMessageType( "SoftwareConfig", (_message.Message,), @@ -2801,7 +3357,7 @@ ``yarn-site.xml`` For more information, see `Cluster properties `__. optional_components: - The set of optional components to activate on the cluster. + Optional. The set of components to activate on the cluster. """, # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.SoftwareConfig) ), @@ -2946,7 +3502,11 @@ config.secondary\_worker\_config.num\_instances .. raw:: html .. raw:: html Resize secondary worker group .. raw:: html .. raw:: html .. raw:: - html .. raw:: html + html .. raw:: html + config.autoscaling\_config.policy\_uri .. raw:: html + .. raw:: html Use, stop using, or change + autoscaling policies .. raw:: html .. raw:: html + .. raw:: html .. raw:: html request_id: Optional. A unique id used to identify the request. 
If the server receives two [UpdateClusterRequest][google.cloud.datapr @@ -3142,19 +3702,111 @@ DESCRIPTOR._options = None _CLUSTER_LABELSENTRY._options = None +_CLUSTER.fields_by_name["labels"]._options = None +_CLUSTER.fields_by_name["status"]._options = None +_CLUSTER.fields_by_name["status_history"]._options = None +_CLUSTER.fields_by_name["cluster_uuid"]._options = None +_CLUSTERCONFIG.fields_by_name["config_bucket"]._options = None +_CLUSTERCONFIG.fields_by_name["gce_cluster_config"]._options = None +_CLUSTERCONFIG.fields_by_name["master_config"]._options = None +_CLUSTERCONFIG.fields_by_name["worker_config"]._options = None +_CLUSTERCONFIG.fields_by_name["secondary_worker_config"]._options = None +_CLUSTERCONFIG.fields_by_name["software_config"]._options = None +_CLUSTERCONFIG.fields_by_name["initialization_actions"]._options = None +_CLUSTERCONFIG.fields_by_name["encryption_config"]._options = None +_CLUSTERCONFIG.fields_by_name["autoscaling_config"]._options = None +_CLUSTERCONFIG.fields_by_name["security_config"]._options = None +_AUTOSCALINGCONFIG.fields_by_name["policy_uri"]._options = None +_ENCRYPTIONCONFIG.fields_by_name["gce_pd_kms_key_name"]._options = None _GCECLUSTERCONFIG_METADATAENTRY._options = None +_GCECLUSTERCONFIG.fields_by_name["zone_uri"]._options = None +_GCECLUSTERCONFIG.fields_by_name["network_uri"]._options = None +_GCECLUSTERCONFIG.fields_by_name["subnetwork_uri"]._options = None +_GCECLUSTERCONFIG.fields_by_name["internal_ip_only"]._options = None +_GCECLUSTERCONFIG.fields_by_name["service_account"]._options = None +_GCECLUSTERCONFIG.fields_by_name["service_account_scopes"]._options = None +_INSTANCEGROUPCONFIG.fields_by_name["num_instances"]._options = None +_INSTANCEGROUPCONFIG.fields_by_name["instance_names"]._options = None +_INSTANCEGROUPCONFIG.fields_by_name["image_uri"]._options = None +_INSTANCEGROUPCONFIG.fields_by_name["machine_type_uri"]._options = None +_INSTANCEGROUPCONFIG.fields_by_name["disk_config"]._options = None 
+_INSTANCEGROUPCONFIG.fields_by_name["is_preemptible"]._options = None +_INSTANCEGROUPCONFIG.fields_by_name["managed_group_config"]._options = None +_INSTANCEGROUPCONFIG.fields_by_name["accelerators"]._options = None +_INSTANCEGROUPCONFIG.fields_by_name["min_cpu_platform"]._options = None +_MANAGEDGROUPCONFIG.fields_by_name["instance_template_name"]._options = None +_MANAGEDGROUPCONFIG.fields_by_name["instance_group_manager_name"]._options = None +_DISKCONFIG.fields_by_name["boot_disk_type"]._options = None +_DISKCONFIG.fields_by_name["boot_disk_size_gb"]._options = None +_DISKCONFIG.fields_by_name["num_local_ssds"]._options = None +_NODEINITIALIZATIONACTION.fields_by_name["executable_file"]._options = None +_NODEINITIALIZATIONACTION.fields_by_name["execution_timeout"]._options = None +_CLUSTERSTATUS.fields_by_name["state"]._options = None +_CLUSTERSTATUS.fields_by_name["detail"]._options = None +_CLUSTERSTATUS.fields_by_name["state_start_time"]._options = None +_CLUSTERSTATUS.fields_by_name["substate"]._options = None +_KERBEROSCONFIG.fields_by_name["enable_kerberos"]._options = None +_KERBEROSCONFIG.fields_by_name["root_principal_password_uri"]._options = None +_KERBEROSCONFIG.fields_by_name["kms_key_uri"]._options = None +_KERBEROSCONFIG.fields_by_name["keystore_uri"]._options = None +_KERBEROSCONFIG.fields_by_name["truststore_uri"]._options = None +_KERBEROSCONFIG.fields_by_name["keystore_password_uri"]._options = None +_KERBEROSCONFIG.fields_by_name["key_password_uri"]._options = None +_KERBEROSCONFIG.fields_by_name["truststore_password_uri"]._options = None +_KERBEROSCONFIG.fields_by_name["cross_realm_trust_realm"]._options = None +_KERBEROSCONFIG.fields_by_name["cross_realm_trust_kdc"]._options = None +_KERBEROSCONFIG.fields_by_name["cross_realm_trust_admin_server"]._options = None +_KERBEROSCONFIG.fields_by_name["cross_realm_trust_shared_password_uri"]._options = None +_KERBEROSCONFIG.fields_by_name["kdc_db_key_uri"]._options = None 
+_KERBEROSCONFIG.fields_by_name["tgt_lifetime_hours"]._options = None +_KERBEROSCONFIG.fields_by_name["realm"]._options = None _SOFTWARECONFIG_PROPERTIESENTRY._options = None +_SOFTWARECONFIG.fields_by_name["image_version"]._options = None +_SOFTWARECONFIG.fields_by_name["properties"]._options = None +_SOFTWARECONFIG.fields_by_name["optional_components"]._options = None _CLUSTERMETRICS_HDFSMETRICSENTRY._options = None _CLUSTERMETRICS_YARNMETRICSENTRY._options = None +_CREATECLUSTERREQUEST.fields_by_name["project_id"]._options = None +_CREATECLUSTERREQUEST.fields_by_name["region"]._options = None +_CREATECLUSTERREQUEST.fields_by_name["cluster"]._options = None +_CREATECLUSTERREQUEST.fields_by_name["request_id"]._options = None +_UPDATECLUSTERREQUEST.fields_by_name["project_id"]._options = None +_UPDATECLUSTERREQUEST.fields_by_name["region"]._options = None +_UPDATECLUSTERREQUEST.fields_by_name["cluster_name"]._options = None +_UPDATECLUSTERREQUEST.fields_by_name["cluster"]._options = None +_UPDATECLUSTERREQUEST.fields_by_name["graceful_decommission_timeout"]._options = None +_UPDATECLUSTERREQUEST.fields_by_name["update_mask"]._options = None +_UPDATECLUSTERREQUEST.fields_by_name["request_id"]._options = None +_DELETECLUSTERREQUEST.fields_by_name["project_id"]._options = None +_DELETECLUSTERREQUEST.fields_by_name["region"]._options = None +_DELETECLUSTERREQUEST.fields_by_name["cluster_name"]._options = None +_DELETECLUSTERREQUEST.fields_by_name["cluster_uuid"]._options = None +_DELETECLUSTERREQUEST.fields_by_name["request_id"]._options = None +_GETCLUSTERREQUEST.fields_by_name["project_id"]._options = None +_GETCLUSTERREQUEST.fields_by_name["region"]._options = None +_GETCLUSTERREQUEST.fields_by_name["cluster_name"]._options = None +_LISTCLUSTERSREQUEST.fields_by_name["project_id"]._options = None +_LISTCLUSTERSREQUEST.fields_by_name["region"]._options = None +_LISTCLUSTERSREQUEST.fields_by_name["filter"]._options = None 
+_LISTCLUSTERSREQUEST.fields_by_name["page_size"]._options = None +_LISTCLUSTERSREQUEST.fields_by_name["page_token"]._options = None +_LISTCLUSTERSRESPONSE.fields_by_name["clusters"]._options = None +_LISTCLUSTERSRESPONSE.fields_by_name["next_page_token"]._options = None +_DIAGNOSECLUSTERREQUEST.fields_by_name["project_id"]._options = None +_DIAGNOSECLUSTERREQUEST.fields_by_name["region"]._options = None +_DIAGNOSECLUSTERREQUEST.fields_by_name["cluster_name"]._options = None +_DIAGNOSECLUSTERRESULTS.fields_by_name["output_uri"]._options = None _CLUSTERCONTROLLER = _descriptor.ServiceDescriptor( name="ClusterController", full_name="google.cloud.dataproc.v1.ClusterController", file=DESCRIPTOR, index=0, - serialized_options=None, - serialized_start=4219, - serialized_end=5293, + serialized_options=_b( + "\312A\027dataproc.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" + ), + serialized_start=5480, + serialized_end=7062, methods=[ _descriptor.MethodDescriptor( name="CreateCluster", @@ -3164,7 +3816,7 @@ input_type=_CREATECLUSTERREQUEST, output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, serialized_options=_b( - '\202\323\344\223\002>"3/v1/projects/{project_id}/regions/{region}/clusters:\007cluster' + '\202\323\344\223\002>"3/v1/projects/{project_id}/regions/{region}/clusters:\007cluster\332A\031project_id,region,cluster\312A<\n\007Cluster\0221google.cloud.dataproc.v1.ClusterOperationMetadata' ), ), _descriptor.MethodDescriptor( @@ -3175,7 +3827,7 @@ input_type=_UPDATECLUSTERREQUEST, output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, serialized_options=_b( - "\202\323\344\223\002M2B/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}:\007cluster" + "\202\323\344\223\002M2B/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}:\007cluster\312A<\n\007Cluster\0221google.cloud.dataproc.v1.ClusterOperationMetadata" ), ), _descriptor.MethodDescriptor( @@ -3186,7 +3838,7 @@ 
input_type=_DELETECLUSTERREQUEST, output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, serialized_options=_b( - "\202\323\344\223\002D*B/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}" + "\202\323\344\223\002D*B/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}\332A\036project_id,region,cluster_name\312AJ\n\025google.protobuf.Empty\0221google.cloud.dataproc.v1.ClusterOperationMetadata" ), ), _descriptor.MethodDescriptor( @@ -3197,7 +3849,7 @@ input_type=_GETCLUSTERREQUEST, output_type=_CLUSTER, serialized_options=_b( - "\202\323\344\223\002D\022B/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}" + "\202\323\344\223\002D\022B/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}\332A\036project_id,region,cluster_name" ), ), _descriptor.MethodDescriptor( @@ -3208,7 +3860,7 @@ input_type=_LISTCLUSTERSREQUEST, output_type=_LISTCLUSTERSRESPONSE, serialized_options=_b( - "\202\323\344\223\0025\0223/v1/projects/{project_id}/regions/{region}/clusters" + "\202\323\344\223\0025\0223/v1/projects/{project_id}/regions/{region}/clusters\332A\021project_id,region\332A\030project_id,region,filter" ), ), _descriptor.MethodDescriptor( @@ -3219,7 +3871,7 @@ input_type=_DIAGNOSECLUSTERREQUEST, output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, serialized_options=_b( - '\202\323\344\223\002P"K/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}:diagnose:\001*' + '\202\323\344\223\002P"K/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}:diagnose:\001*\332A\036project_id,region,cluster_name\312A/\n\025google.protobuf.Empty\022\026DiagnoseClusterResults' ), ), ], diff --git a/dataproc/google/cloud/dataproc_v1/proto/clusters_pb2_grpc.py b/dataproc/google/cloud/dataproc_v1/proto/clusters_pb2_grpc.py index 5d4b275b8ab0..def69f148416 100644 --- a/dataproc/google/cloud/dataproc_v1/proto/clusters_pb2_grpc.py +++ 
b/dataproc/google/cloud/dataproc_v1/proto/clusters_pb2_grpc.py @@ -58,21 +58,27 @@ class ClusterControllerServicer(object): """ def CreateCluster(self, request, context): - """Creates a cluster in a project. + """Creates a cluster in a project. The returned + [Operation.metadata][google.longrunning.Operation.metadata] will be + [ClusterOperationMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata). """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def UpdateCluster(self, request, context): - """Updates a cluster in a project. + """Updates a cluster in a project. The returned + [Operation.metadata][google.longrunning.Operation.metadata] will be + [ClusterOperationMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata). """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def DeleteCluster(self, request, context): - """Deletes a cluster in a project. + """Deletes a cluster in a project. The returned + [Operation.metadata][google.longrunning.Operation.metadata] will be + [ClusterOperationMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata). """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") @@ -93,9 +99,13 @@ def ListClusters(self, request, context): raise NotImplementedError("Method not implemented!") def DiagnoseCluster(self, request, context): - """Gets cluster diagnostic information. - After the operation completes, the Operation.response field - contains `DiagnoseClusterOutputLocation`. + """Gets cluster diagnostic information. 
The returned + [Operation.metadata][google.longrunning.Operation.metadata] will be + [ClusterOperationMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata). + After the operation completes, + [Operation.response][google.longrunning.Operation.response] + contains + [DiagnoseClusterResults](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#diagnoseclusterresults). """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") diff --git a/dataproc/google/cloud/dataproc_v1/proto/jobs.proto b/dataproc/google/cloud/dataproc_v1/proto/jobs.proto index 7ead7bb95ca0..eeba155deebe 100644 --- a/dataproc/google/cloud/dataproc_v1/proto/jobs.proto +++ b/dataproc/google/cloud/dataproc_v1/proto/jobs.proto @@ -18,6 +18,8 @@ syntax = "proto3"; package google.cloud.dataproc.v1; import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; import "google/protobuf/empty.proto"; import "google/protobuf/field_mask.proto"; import "google/protobuf/timestamp.proto"; @@ -29,12 +31,16 @@ option java_package = "com.google.cloud.dataproc.v1"; // The JobController provides methods to manage jobs. service JobController { + option (google.api.default_host) = "dataproc.googleapis.com"; + option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; + // Submits a job to a cluster. rpc SubmitJob(SubmitJobRequest) returns (Job) { option (google.api.http) = { post: "/v1/projects/{project_id}/regions/{region}/jobs:submit" body: "*" }; + option (google.api.method_signature) = "project_id,region,job"; } // Gets the resource representation for a job in a project. @@ -42,6 +48,7 @@ service JobController { option (google.api.http) = { get: "/v1/projects/{project_id}/regions/{region}/jobs/{job_id}" }; + option (google.api.method_signature) = "project_id,region,job_id"; } // Lists regions/{region}/jobs in a project. 
@@ -49,6 +56,8 @@ service JobController { option (google.api.http) = { get: "/v1/projects/{project_id}/regions/{region}/jobs" }; + option (google.api.method_signature) = "project_id,region"; + option (google.api.method_signature) = "project_id,region,filter"; } // Updates a job in a project. @@ -69,6 +78,7 @@ service JobController { post: "/v1/projects/{project_id}/regions/{region}/jobs/{job_id}:cancel" body: "*" }; + option (google.api.method_signature) = "project_id,region,job_id"; } // Deletes the job from the project. If the job is active, the delete fails, @@ -77,6 +87,7 @@ service JobController { option (google.api.http) = { delete: "/v1/projects/{project_id}/regions/{region}/jobs/{job_id}" }; + option (google.api.method_signature) = "project_id,region,job_id"; } } @@ -148,30 +159,30 @@ message HadoopJob { // include arguments, such as `-libjars` or `-Dfoo=bar`, that can be set as // job properties, since a collision may occur that causes an incorrect job // submission. - repeated string args = 3; + repeated string args = 3 [(google.api.field_behavior) = OPTIONAL]; // Optional. Jar file URIs to add to the CLASSPATHs of the // Hadoop driver and tasks. - repeated string jar_file_uris = 4; + repeated string jar_file_uris = 4 [(google.api.field_behavior) = OPTIONAL]; // Optional. HCFS (Hadoop Compatible Filesystem) URIs of files to be copied // to the working directory of Hadoop drivers and distributed tasks. Useful // for naively parallel tasks. - repeated string file_uris = 5; + repeated string file_uris = 5 [(google.api.field_behavior) = OPTIONAL]; // Optional. HCFS URIs of archives to be extracted in the working directory of // Hadoop drivers and tasks. Supported file types: // .jar, .tar, .tar.gz, .tgz, or .zip. - repeated string archive_uris = 6; + repeated string archive_uris = 6 [(google.api.field_behavior) = OPTIONAL]; // Optional. A mapping of property names to values, used to configure Hadoop. 
// Properties that conflict with values set by the Cloud Dataproc API may be // overwritten. Can include properties set in /etc/hadoop/conf/*-site and // classes in user code. - map properties = 7; + map properties = 7 [(google.api.field_behavior) = OPTIONAL]; // Optional. The runtime log config for job execution. - LoggingConfig logging_config = 8; + LoggingConfig logging_config = 8 [(google.api.field_behavior) = OPTIONAL]; } // A Cloud Dataproc job for running [Apache Spark](http://spark.apache.org/) @@ -194,29 +205,29 @@ message SparkJob { // Optional. The arguments to pass to the driver. Do not include arguments, // such as `--conf`, that can be set as job properties, since a collision may // occur that causes an incorrect job submission. - repeated string args = 3; + repeated string args = 3 [(google.api.field_behavior) = OPTIONAL]; // Optional. HCFS URIs of jar files to add to the CLASSPATHs of the // Spark driver and tasks. - repeated string jar_file_uris = 4; + repeated string jar_file_uris = 4 [(google.api.field_behavior) = OPTIONAL]; // Optional. HCFS URIs of files to be copied to the working directory of // Spark drivers and distributed tasks. Useful for naively parallel tasks. - repeated string file_uris = 5; + repeated string file_uris = 5 [(google.api.field_behavior) = OPTIONAL]; // Optional. HCFS URIs of archives to be extracted in the working directory // of Spark drivers and tasks. Supported file types: // .jar, .tar, .tar.gz, .tgz, and .zip. - repeated string archive_uris = 6; + repeated string archive_uris = 6 [(google.api.field_behavior) = OPTIONAL]; // Optional. A mapping of property names to values, used to configure Spark. // Properties that conflict with values set by the Cloud Dataproc API may be // overwritten. Can include properties set in // /etc/spark/conf/spark-defaults.conf and classes in user code. - map properties = 7; + map properties = 7 [(google.api.field_behavior) = OPTIONAL]; // Optional. 
The runtime log config for job execution. - LoggingConfig logging_config = 8; + LoggingConfig logging_config = 8 [(google.api.field_behavior) = OPTIONAL]; } // A Cloud Dataproc job for running @@ -226,37 +237,37 @@ message SparkJob { message PySparkJob { // Required. The HCFS URI of the main Python file to use as the driver. Must // be a .py file. - string main_python_file_uri = 1; + string main_python_file_uri = 1 [(google.api.field_behavior) = REQUIRED]; // Optional. The arguments to pass to the driver. Do not include arguments, // such as `--conf`, that can be set as job properties, since a collision may // occur that causes an incorrect job submission. - repeated string args = 2; + repeated string args = 2 [(google.api.field_behavior) = OPTIONAL]; // Optional. HCFS file URIs of Python files to pass to the PySpark // framework. Supported file types: .py, .egg, and .zip. - repeated string python_file_uris = 3; + repeated string python_file_uris = 3 [(google.api.field_behavior) = OPTIONAL]; // Optional. HCFS URIs of jar files to add to the CLASSPATHs of the // Python driver and tasks. - repeated string jar_file_uris = 4; + repeated string jar_file_uris = 4 [(google.api.field_behavior) = OPTIONAL]; // Optional. HCFS URIs of files to be copied to the working directory of // Python drivers and distributed tasks. Useful for naively parallel tasks. - repeated string file_uris = 5; + repeated string file_uris = 5 [(google.api.field_behavior) = OPTIONAL]; // Optional. HCFS URIs of archives to be extracted in the working directory of // .jar, .tar, .tar.gz, .tgz, and .zip. - repeated string archive_uris = 6; + repeated string archive_uris = 6 [(google.api.field_behavior) = OPTIONAL]; // Optional. A mapping of property names to values, used to configure PySpark. // Properties that conflict with values set by the Cloud Dataproc API may be // overwritten. Can include properties set in // /etc/spark/conf/spark-defaults.conf and classes in user code. 
- map properties = 7; + map properties = 7 [(google.api.field_behavior) = OPTIONAL]; // Optional. The runtime log config for job execution. - LoggingConfig logging_config = 8; + LoggingConfig logging_config = 8 [(google.api.field_behavior) = OPTIONAL]; } // A list of queries to run on a cluster. @@ -275,7 +286,7 @@ message QueryList { // ] // } // } - repeated string queries = 1; + repeated string queries = 1 [(google.api.field_behavior) = REQUIRED]; } // A Cloud Dataproc job for running [Apache Hive](https://hive.apache.org/) @@ -294,22 +305,22 @@ message HiveJob { // Optional. Whether to continue executing queries if a query fails. // The default value is `false`. Setting to `true` can be useful when // executing independent parallel queries. - bool continue_on_failure = 3; + bool continue_on_failure = 3 [(google.api.field_behavior) = OPTIONAL]; // Optional. Mapping of query variable names to values (equivalent to the // Hive command: `SET name="value";`). - map script_variables = 4; + map script_variables = 4 [(google.api.field_behavior) = OPTIONAL]; // Optional. A mapping of property names and values, used to configure Hive. // Properties that conflict with values set by the Cloud Dataproc API may be // overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, // /etc/hive/conf/hive-site.xml, and classes in user code. - map properties = 5; + map properties = 5 [(google.api.field_behavior) = OPTIONAL]; // Optional. HCFS URIs of jar files to add to the CLASSPATH of the // Hive server and Hadoop MapReduce (MR) tasks. Can contain Hive SerDes // and UDFs. - repeated string jar_file_uris = 6; + repeated string jar_file_uris = 6 [(google.api.field_behavior) = OPTIONAL]; } // A Cloud Dataproc job for running [Apache Spark @@ -327,18 +338,18 @@ message SparkSqlJob { // Optional. Mapping of query variable names to values (equivalent to the // Spark SQL command: SET `name="value";`). 
- map script_variables = 3; + map script_variables = 3 [(google.api.field_behavior) = OPTIONAL]; // Optional. A mapping of property names to values, used to configure // Spark SQL's SparkConf. Properties that conflict with values set by the // Cloud Dataproc API may be overwritten. - map properties = 4; + map properties = 4 [(google.api.field_behavior) = OPTIONAL]; // Optional. HCFS URIs of jar files to be added to the Spark CLASSPATH. - repeated string jar_file_uris = 56; + repeated string jar_file_uris = 56 [(google.api.field_behavior) = OPTIONAL]; // Optional. The runtime log config for job execution. - LoggingConfig logging_config = 6; + LoggingConfig logging_config = 6 [(google.api.field_behavior) = OPTIONAL]; } // A Cloud Dataproc job for running [Apache Pig](https://pig.apache.org/) @@ -357,34 +368,34 @@ message PigJob { // Optional. Whether to continue executing queries if a query fails. // The default value is `false`. Setting to `true` can be useful when // executing independent parallel queries. - bool continue_on_failure = 3; + bool continue_on_failure = 3 [(google.api.field_behavior) = OPTIONAL]; // Optional. Mapping of query variable names to values (equivalent to the Pig // command: `name=[value]`). - map script_variables = 4; + map script_variables = 4 [(google.api.field_behavior) = OPTIONAL]; // Optional. A mapping of property names to values, used to configure Pig. // Properties that conflict with values set by the Cloud Dataproc API may be // overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, // /etc/pig/conf/pig.properties, and classes in user code. - map properties = 5; + map properties = 5 [(google.api.field_behavior) = OPTIONAL]; // Optional. HCFS URIs of jar files to add to the CLASSPATH of // the Pig Client and Hadoop MapReduce (MR) tasks. Can contain Pig UDFs. - repeated string jar_file_uris = 6; + repeated string jar_file_uris = 6 [(google.api.field_behavior) = OPTIONAL]; // Optional. 
The runtime log config for job execution. - LoggingConfig logging_config = 7; + LoggingConfig logging_config = 7 [(google.api.field_behavior) = OPTIONAL]; } // Cloud Dataproc job config. message JobPlacement { // Required. The name of the cluster where the job will be submitted. - string cluster_name = 1; + string cluster_name = 1 [(google.api.field_behavior) = REQUIRED]; // Output only. A cluster UUID generated by the Cloud Dataproc service when // the job is submitted. - string cluster_uuid = 2; + string cluster_uuid = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; } // Cloud Dataproc job status. @@ -453,25 +464,28 @@ message JobStatus { } // Output only. A state message specifying the overall job state. - State state = 1; + State state = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; - // Output only. Optional job state details, such as an error + // Optional. Output only. Job state details, such as an error // description if the state is ERROR. - string details = 2; + string details = 2 [ + (google.api.field_behavior) = OUTPUT_ONLY, + (google.api.field_behavior) = OPTIONAL + ]; // Output only. The time when this state was entered. - google.protobuf.Timestamp state_start_time = 6; + google.protobuf.Timestamp state_start_time = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Additional state information, which includes // status reported by the agent. - Substate substate = 7; + Substate substate = 7 [(google.api.field_behavior) = OUTPUT_ONLY]; } // Encapsulates the full scoping used to reference a job. message JobReference { // Required. The ID of the Google Cloud Platform project that the job // belongs to. - string project_id = 1; + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; // Optional. The job ID, which must be unique within the project. // @@ -479,7 +493,7 @@ message JobReference { // underscores (_), or hyphens (-). The maximum length is 100 characters. 
// // If not specified by the caller, the job ID will be provided by the server. - string job_id = 2; + string job_id = 2 [(google.api.field_behavior) = OPTIONAL]; } // A YARN application created by a job. Application information is a subset of @@ -520,19 +534,19 @@ message YarnApplication { } // Required. The application name. - string name = 1; + string name = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The application state. - State state = 2; + State state = 2 [(google.api.field_behavior) = REQUIRED]; // Required. The numerical progress of the application, from 1 to 100. - float progress = 3; + float progress = 3 [(google.api.field_behavior) = REQUIRED]; // Optional. The HTTP URL of the ApplicationMaster, HistoryServer, or // TimelineServer that provides application-specific information. The URL uses // the internal hostname, and requires a proxy server for resolution and, // possibly, access. - string tracking_url = 4; + string tracking_url = 4 [(google.api.field_behavior) = OPTIONAL]; } // A Cloud Dataproc job resource. @@ -541,11 +555,11 @@ message Job { // obtain the equivalent REST path of the job resource. If this property // is not specified when a job is created, the server generates a // job_id. - JobReference reference = 1; + JobReference reference = 1 [(google.api.field_behavior) = OPTIONAL]; // Required. Job information, including how, when, and where to // run the job. - JobPlacement placement = 2; + JobPlacement placement = 2 [(google.api.field_behavior) = REQUIRED]; // Required. The application/framework-specific portion of the job. oneof type_job { @@ -571,25 +585,25 @@ message Job { // Output only. The job status. Additional application-specific // status information may be contained in the type_job // and yarn_applications fields. - JobStatus status = 8; + JobStatus status = 8 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The previous job status. 
- repeated JobStatus status_history = 13; + repeated JobStatus status_history = 13 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The collection of YARN applications spun up by this job. // // **Beta** Feature: This report is available for testing purposes only. It // may be changed before final release. - repeated YarnApplication yarn_applications = 9; + repeated YarnApplication yarn_applications = 9 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. A URI pointing to the location of the stdout of the job's // driver program. - string driver_output_resource_uri = 17; + string driver_output_resource_uri = 17 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. If present, the location of miscellaneous control files // which may be used as part of job setup and handling. If not present, // control files may be placed in the same location as `driver_output_uri`. - string driver_control_files_uri = 15; + string driver_control_files_uri = 15 [(google.api.field_behavior) = OUTPUT_ONLY]; // Optional. The labels to associate with this job. // Label **keys** must contain 1 to 63 characters, and must conform to @@ -598,15 +612,15 @@ message Job { // characters, and must conform to [RFC // 1035](https://www.ietf.org/rfc/rfc1035.txt). No more than 32 labels can be // associated with a job. - map labels = 18; + map labels = 18 [(google.api.field_behavior) = OPTIONAL]; // Optional. Job scheduling configuration. - JobScheduling scheduling = 20; + JobScheduling scheduling = 20 [(google.api.field_behavior) = OPTIONAL]; // Output only. A UUID that uniquely identifies a job within the project // over time. This is in contrast to a user-settable reference.job_id that // may be reused over time. - string job_uuid = 22; + string job_uuid = 22 [(google.api.field_behavior) = OUTPUT_ONLY]; } // Job scheduling options. @@ -619,20 +633,20 @@ message JobScheduling { // 4 times within 10 minute window. // // Maximum value is 10. 
- int32 max_failures_per_hour = 1; + int32 max_failures_per_hour = 1 [(google.api.field_behavior) = OPTIONAL]; } // A request to submit a job. message SubmitJobRequest { // Required. The ID of the Google Cloud Platform project that the job // belongs to. - string project_id = 1; + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The Cloud Dataproc region in which to handle the request. - string region = 3; + string region = 3 [(google.api.field_behavior) = REQUIRED]; // Required. The job resource. - Job job = 2; + Job job = 2 [(google.api.field_behavior) = REQUIRED]; // Optional. A unique id used to identify the request. If the server // receives two [SubmitJobRequest][google.cloud.dataproc.v1.SubmitJobRequest] requests with the same @@ -645,20 +659,20 @@ message SubmitJobRequest { // // The id must contain only letters (a-z, A-Z), numbers (0-9), // underscores (_), and hyphens (-). The maximum length is 40 characters. - string request_id = 4; + string request_id = 4 [(google.api.field_behavior) = OPTIONAL]; } // A request to get the resource representation for a job in a project. message GetJobRequest { // Required. The ID of the Google Cloud Platform project that the job // belongs to. - string project_id = 1; + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The Cloud Dataproc region in which to handle the request. - string region = 3; + string region = 3 [(google.api.field_behavior) = REQUIRED]; // Required. The job ID. - string job_id = 2; + string job_id = 2 [(google.api.field_behavior) = REQUIRED]; } // A request to list jobs in a project. @@ -678,27 +692,27 @@ message ListJobsRequest { // Required. The ID of the Google Cloud Platform project that the job // belongs to. - string project_id = 1; + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The Cloud Dataproc region in which to handle the request. 
- string region = 6; + string region = 6 [(google.api.field_behavior) = REQUIRED]; // Optional. The number of results to return in each response. - int32 page_size = 2; + int32 page_size = 2 [(google.api.field_behavior) = OPTIONAL]; // Optional. The page token, returned by a previous call, to request the // next page of results. - string page_token = 3; + string page_token = 3 [(google.api.field_behavior) = OPTIONAL]; // Optional. If set, the returned jobs list includes only jobs that were // submitted to the named cluster. - string cluster_name = 4; + string cluster_name = 4 [(google.api.field_behavior) = OPTIONAL]; // Optional. Specifies enumerated categories of jobs to list. // (default = match ALL jobs). // // If `filter` is provided, `jobStateMatcher` will be ignored. - JobStateMatcher job_state_matcher = 5; + JobStateMatcher job_state_matcher = 5 [(google.api.field_behavior) = OPTIONAL]; // Optional. A filter constraining the jobs to list. Filters are // case-sensitive and have the following syntax: @@ -714,23 +728,23 @@ message ListJobsRequest { // Example filter: // // status.state = ACTIVE AND labels.env = staging AND labels.starred = * - string filter = 7; + string filter = 7 [(google.api.field_behavior) = OPTIONAL]; } // A request to update a job. message UpdateJobRequest { // Required. The ID of the Google Cloud Platform project that the job // belongs to. - string project_id = 1; + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The Cloud Dataproc region in which to handle the request. - string region = 2; + string region = 2 [(google.api.field_behavior) = REQUIRED]; // Required. The job ID. - string job_id = 3; + string job_id = 3 [(google.api.field_behavior) = REQUIRED]; // Required. The changes to the job. - Job job = 4; + Job job = 4 [(google.api.field_behavior) = REQUIRED]; // Required. Specifies the path, relative to Job, of // the field to update. 
For example, to update the labels of a Job the @@ -738,42 +752,42 @@ message UpdateJobRequest { // labels, and the `PATCH` request body would specify the new // value. Note: Currently, labels is the only // field that can be updated. - google.protobuf.FieldMask update_mask = 5; + google.protobuf.FieldMask update_mask = 5 [(google.api.field_behavior) = REQUIRED]; } // A list of jobs in a project. message ListJobsResponse { // Output only. Jobs list. - repeated Job jobs = 1; + repeated Job jobs = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; // Optional. This token is included in the response if there are more results // to fetch. To fetch additional results, provide this value as the // `page_token` in a subsequent ListJobsRequest. - string next_page_token = 2; + string next_page_token = 2 [(google.api.field_behavior) = OPTIONAL]; } // A request to cancel a job. message CancelJobRequest { // Required. The ID of the Google Cloud Platform project that the job // belongs to. - string project_id = 1; + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The Cloud Dataproc region in which to handle the request. - string region = 3; + string region = 3 [(google.api.field_behavior) = REQUIRED]; // Required. The job ID. - string job_id = 2; + string job_id = 2 [(google.api.field_behavior) = REQUIRED]; } // A request to delete a job. message DeleteJobRequest { // Required. The ID of the Google Cloud Platform project that the job // belongs to. - string project_id = 1; + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The Cloud Dataproc region in which to handle the request. - string region = 3; + string region = 3 [(google.api.field_behavior) = REQUIRED]; // Required. The job ID. 
- string job_id = 2; + string job_id = 2 [(google.api.field_behavior) = REQUIRED]; } diff --git a/dataproc/google/cloud/dataproc_v1/proto/jobs_pb2.py b/dataproc/google/cloud/dataproc_v1/proto/jobs_pb2.py index 67011adb07f3..294c5acca05e 100644 --- a/dataproc/google/cloud/dataproc_v1/proto/jobs_pb2.py +++ b/dataproc/google/cloud/dataproc_v1/proto/jobs_pb2.py @@ -16,6 +16,8 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 @@ -29,10 +31,12 @@ "\n\034com.google.cloud.dataproc.v1B\tJobsProtoP\001Z@google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataproc" ), serialized_pb=_b( - '\n)google/cloud/dataproc_v1/proto/jobs.proto\x12\x18google.cloud.dataproc.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xc1\x02\n\rLoggingConfig\x12W\n\x11\x64river_log_levels\x18\x02 \x03(\x0b\x32<.google.cloud.dataproc.v1.LoggingConfig.DriverLogLevelsEntry\x1a\x65\n\x14\x44riverLogLevelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12<\n\x05value\x18\x02 \x01(\x0e\x32-.google.cloud.dataproc.v1.LoggingConfig.Level:\x02\x38\x01"p\n\x05Level\x12\x15\n\x11LEVEL_UNSPECIFIED\x10\x00\x12\x07\n\x03\x41LL\x10\x01\x12\t\n\x05TRACE\x10\x02\x12\t\n\x05\x44\x45\x42UG\x10\x03\x12\x08\n\x04INFO\x10\x04\x12\x08\n\x04WARN\x10\x05\x12\t\n\x05\x45RROR\x10\x06\x12\t\n\x05\x46\x41TAL\x10\x07\x12\x07\n\x03OFF\x10\x08"\xd3\x02\n\tHadoopJob\x12\x1b\n\x11main_jar_file_uri\x18\x01 \x01(\tH\x00\x12\x14\n\nmain_class\x18\x02 \x01(\tH\x00\x12\x0c\n\x04\x61rgs\x18\x03 
\x03(\t\x12\x15\n\rjar_file_uris\x18\x04 \x03(\t\x12\x11\n\tfile_uris\x18\x05 \x03(\t\x12\x14\n\x0c\x61rchive_uris\x18\x06 \x03(\t\x12G\n\nproperties\x18\x07 \x03(\x0b\x32\x33.google.cloud.dataproc.v1.HadoopJob.PropertiesEntry\x12?\n\x0elogging_config\x18\x08 \x01(\x0b\x32\'.google.cloud.dataproc.v1.LoggingConfig\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x08\n\x06\x64river"\xd1\x02\n\x08SparkJob\x12\x1b\n\x11main_jar_file_uri\x18\x01 \x01(\tH\x00\x12\x14\n\nmain_class\x18\x02 \x01(\tH\x00\x12\x0c\n\x04\x61rgs\x18\x03 \x03(\t\x12\x15\n\rjar_file_uris\x18\x04 \x03(\t\x12\x11\n\tfile_uris\x18\x05 \x03(\t\x12\x14\n\x0c\x61rchive_uris\x18\x06 \x03(\t\x12\x46\n\nproperties\x18\x07 \x03(\x0b\x32\x32.google.cloud.dataproc.v1.SparkJob.PropertiesEntry\x12?\n\x0elogging_config\x18\x08 \x01(\x0b\x32\'.google.cloud.dataproc.v1.LoggingConfig\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x08\n\x06\x64river"\xd0\x02\n\nPySparkJob\x12\x1c\n\x14main_python_file_uri\x18\x01 \x01(\t\x12\x0c\n\x04\x61rgs\x18\x02 \x03(\t\x12\x18\n\x10python_file_uris\x18\x03 \x03(\t\x12\x15\n\rjar_file_uris\x18\x04 \x03(\t\x12\x11\n\tfile_uris\x18\x05 \x03(\t\x12\x14\n\x0c\x61rchive_uris\x18\x06 \x03(\t\x12H\n\nproperties\x18\x07 \x03(\x0b\x32\x34.google.cloud.dataproc.v1.PySparkJob.PropertiesEntry\x12?\n\x0elogging_config\x18\x08 \x01(\x0b\x32\'.google.cloud.dataproc.v1.LoggingConfig\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x1c\n\tQueryList\x12\x0f\n\x07queries\x18\x01 \x03(\t"\xa1\x03\n\x07HiveJob\x12\x18\n\x0equery_file_uri\x18\x01 \x01(\tH\x00\x12\x39\n\nquery_list\x18\x02 \x01(\x0b\x32#.google.cloud.dataproc.v1.QueryListH\x00\x12\x1b\n\x13\x63ontinue_on_failure\x18\x03 \x01(\x08\x12P\n\x10script_variables\x18\x04 
\x03(\x0b\x32\x36.google.cloud.dataproc.v1.HiveJob.ScriptVariablesEntry\x12\x45\n\nproperties\x18\x05 \x03(\x0b\x32\x31.google.cloud.dataproc.v1.HiveJob.PropertiesEntry\x12\x15\n\rjar_file_uris\x18\x06 \x03(\t\x1a\x36\n\x14ScriptVariablesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07queries"\xd1\x03\n\x0bSparkSqlJob\x12\x18\n\x0equery_file_uri\x18\x01 \x01(\tH\x00\x12\x39\n\nquery_list\x18\x02 \x01(\x0b\x32#.google.cloud.dataproc.v1.QueryListH\x00\x12T\n\x10script_variables\x18\x03 \x03(\x0b\x32:.google.cloud.dataproc.v1.SparkSqlJob.ScriptVariablesEntry\x12I\n\nproperties\x18\x04 \x03(\x0b\x32\x35.google.cloud.dataproc.v1.SparkSqlJob.PropertiesEntry\x12\x15\n\rjar_file_uris\x18\x38 \x03(\t\x12?\n\x0elogging_config\x18\x06 \x01(\x0b\x32\'.google.cloud.dataproc.v1.LoggingConfig\x1a\x36\n\x14ScriptVariablesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07queries"\xdf\x03\n\x06PigJob\x12\x18\n\x0equery_file_uri\x18\x01 \x01(\tH\x00\x12\x39\n\nquery_list\x18\x02 \x01(\x0b\x32#.google.cloud.dataproc.v1.QueryListH\x00\x12\x1b\n\x13\x63ontinue_on_failure\x18\x03 \x01(\x08\x12O\n\x10script_variables\x18\x04 \x03(\x0b\x32\x35.google.cloud.dataproc.v1.PigJob.ScriptVariablesEntry\x12\x44\n\nproperties\x18\x05 \x03(\x0b\x32\x30.google.cloud.dataproc.v1.PigJob.PropertiesEntry\x12\x15\n\rjar_file_uris\x18\x06 \x03(\t\x12?\n\x0elogging_config\x18\x07 \x01(\x0b\x32\'.google.cloud.dataproc.v1.LoggingConfig\x1a\x36\n\x14ScriptVariablesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01\x42\t\n\x07queries":\n\x0cJobPlacement\x12\x14\n\x0c\x63luster_name\x18\x01 \x01(\t\x12\x14\n\x0c\x63luster_uuid\x18\x02 \x01(\t"\xc2\x03\n\tJobStatus\x12\x38\n\x05state\x18\x01 \x01(\x0e\x32).google.cloud.dataproc.v1.JobStatus.State\x12\x0f\n\x07\x64\x65tails\x18\x02 \x01(\t\x12\x34\n\x10state_start_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12>\n\x08substate\x18\x07 \x01(\x0e\x32,.google.cloud.dataproc.v1.JobStatus.Substate"\xa9\x01\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07PENDING\x10\x01\x12\x0e\n\nSETUP_DONE\x10\x08\x12\x0b\n\x07RUNNING\x10\x02\x12\x12\n\x0e\x43\x41NCEL_PENDING\x10\x03\x12\x12\n\x0e\x43\x41NCEL_STARTED\x10\x07\x12\r\n\tCANCELLED\x10\x04\x12\x08\n\x04\x44ONE\x10\x05\x12\t\n\x05\x45RROR\x10\x06\x12\x13\n\x0f\x41TTEMPT_FAILURE\x10\t"H\n\x08Substate\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\r\n\tSUBMITTED\x10\x01\x12\n\n\x06QUEUED\x10\x02\x12\x10\n\x0cSTALE_STATUS\x10\x03"2\n\x0cJobReference\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06job_id\x18\x02 \x01(\t"\x91\x02\n\x0fYarnApplication\x12\x0c\n\x04name\x18\x01 \x01(\t\x12>\n\x05state\x18\x02 \x01(\x0e\x32/.google.cloud.dataproc.v1.YarnApplication.State\x12\x10\n\x08progress\x18\x03 \x01(\x02\x12\x14\n\x0ctracking_url\x18\x04 \x01(\t"\x87\x01\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x07\n\x03NEW\x10\x01\x12\x0e\n\nNEW_SAVING\x10\x02\x12\r\n\tSUBMITTED\x10\x03\x12\x0c\n\x08\x41\x43\x43\x45PTED\x10\x04\x12\x0b\n\x07RUNNING\x10\x05\x12\x0c\n\x08\x46INISHED\x10\x06\x12\n\n\x06\x46\x41ILED\x10\x07\x12\n\n\x06KILLED\x10\x08"\x9b\x07\n\x03Job\x12\x39\n\treference\x18\x01 \x01(\x0b\x32&.google.cloud.dataproc.v1.JobReference\x12\x39\n\tplacement\x18\x02 \x01(\x0b\x32&.google.cloud.dataproc.v1.JobPlacement\x12\x39\n\nhadoop_job\x18\x03 \x01(\x0b\x32#.google.cloud.dataproc.v1.HadoopJobH\x00\x12\x37\n\tspark_job\x18\x04 \x01(\x0b\x32".google.cloud.dataproc.v1.SparkJobH\x00\x12;\n\x0bpyspark_job\x18\x05 
\x01(\x0b\x32$.google.cloud.dataproc.v1.PySparkJobH\x00\x12\x35\n\x08hive_job\x18\x06 \x01(\x0b\x32!.google.cloud.dataproc.v1.HiveJobH\x00\x12\x33\n\x07pig_job\x18\x07 \x01(\x0b\x32 .google.cloud.dataproc.v1.PigJobH\x00\x12>\n\rspark_sql_job\x18\x0c \x01(\x0b\x32%.google.cloud.dataproc.v1.SparkSqlJobH\x00\x12\x33\n\x06status\x18\x08 \x01(\x0b\x32#.google.cloud.dataproc.v1.JobStatus\x12;\n\x0estatus_history\x18\r \x03(\x0b\x32#.google.cloud.dataproc.v1.JobStatus\x12\x44\n\x11yarn_applications\x18\t \x03(\x0b\x32).google.cloud.dataproc.v1.YarnApplication\x12"\n\x1a\x64river_output_resource_uri\x18\x11 \x01(\t\x12 \n\x18\x64river_control_files_uri\x18\x0f \x01(\t\x12\x39\n\x06labels\x18\x12 \x03(\x0b\x32).google.cloud.dataproc.v1.Job.LabelsEntry\x12;\n\nscheduling\x18\x14 \x01(\x0b\x32\'.google.cloud.dataproc.v1.JobScheduling\x12\x10\n\x08job_uuid\x18\x16 \x01(\t\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\n\n\x08type_job".\n\rJobScheduling\x12\x1d\n\x15max_failures_per_hour\x18\x01 \x01(\x05"v\n\x10SubmitJobRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06region\x18\x03 \x01(\t\x12*\n\x03job\x18\x02 \x01(\x0b\x32\x1d.google.cloud.dataproc.v1.Job\x12\x12\n\nrequest_id\x18\x04 \x01(\t"C\n\rGetJobRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06region\x18\x03 \x01(\t\x12\x0e\n\x06job_id\x18\x02 \x01(\t"\x90\x02\n\x0fListJobsRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06region\x18\x06 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\x12\x14\n\x0c\x63luster_name\x18\x04 \x01(\t\x12T\n\x11job_state_matcher\x18\x05 \x01(\x0e\x32\x39.google.cloud.dataproc.v1.ListJobsRequest.JobStateMatcher\x12\x0e\n\x06\x66ilter\x18\x07 \x01(\t"6\n\x0fJobStateMatcher\x12\x07\n\x03\x41LL\x10\x00\x12\n\n\x06\x41\x43TIVE\x10\x01\x12\x0e\n\nNON_ACTIVE\x10\x02"\xa3\x01\n\x10UpdateJobRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06region\x18\x02 
\x01(\t\x12\x0e\n\x06job_id\x18\x03 \x01(\t\x12*\n\x03job\x18\x04 \x01(\x0b\x32\x1d.google.cloud.dataproc.v1.Job\x12/\n\x0bupdate_mask\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"X\n\x10ListJobsResponse\x12+\n\x04jobs\x18\x01 \x03(\x0b\x32\x1d.google.cloud.dataproc.v1.Job\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"F\n\x10\x43\x61ncelJobRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06region\x18\x03 \x01(\t\x12\x0e\n\x06job_id\x18\x02 \x01(\t"F\n\x10\x44\x65leteJobRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06region\x18\x03 \x01(\t\x12\x0e\n\x06job_id\x18\x02 \x01(\t2\xb6\x07\n\rJobController\x12\x99\x01\n\tSubmitJob\x12*.google.cloud.dataproc.v1.SubmitJobRequest\x1a\x1d.google.cloud.dataproc.v1.Job"A\x82\xd3\xe4\x93\x02;"6/v1/projects/{project_id}/regions/{region}/jobs:submit:\x01*\x12\x92\x01\n\x06GetJob\x12\'.google.cloud.dataproc.v1.GetJobRequest\x1a\x1d.google.cloud.dataproc.v1.Job"@\x82\xd3\xe4\x93\x02:\x12\x38/v1/projects/{project_id}/regions/{region}/jobs/{job_id}\x12\x9a\x01\n\x08ListJobs\x12).google.cloud.dataproc.v1.ListJobsRequest\x1a*.google.cloud.dataproc.v1.ListJobsResponse"7\x82\xd3\xe4\x93\x02\x31\x12//v1/projects/{project_id}/regions/{region}/jobs\x12\x9d\x01\n\tUpdateJob\x12*.google.cloud.dataproc.v1.UpdateJobRequest\x1a\x1d.google.cloud.dataproc.v1.Job"E\x82\xd3\xe4\x93\x02?28/v1/projects/{project_id}/regions/{region}/jobs/{job_id}:\x03job\x12\xa2\x01\n\tCancelJob\x12*.google.cloud.dataproc.v1.CancelJobRequest\x1a\x1d.google.cloud.dataproc.v1.Job"J\x82\xd3\xe4\x93\x02\x44"?/v1/projects/{project_id}/regions/{region}/jobs/{job_id}:cancel:\x01*\x12\x91\x01\n\tDeleteJob\x12*.google.cloud.dataproc.v1.DeleteJobRequest\x1a\x16.google.protobuf.Empty"@\x82\xd3\xe4\x93\x02:*8/v1/projects/{project_id}/regions/{region}/jobs/{job_id}Bm\n\x1c\x63om.google.cloud.dataproc.v1B\tJobsProtoP\x01Z@google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataprocb\x06proto3' + 
'\n)google/cloud/dataproc_v1/proto/jobs.proto\x12\x18google.cloud.dataproc.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xc1\x02\n\rLoggingConfig\x12W\n\x11\x64river_log_levels\x18\x02 \x03(\x0b\x32<.google.cloud.dataproc.v1.LoggingConfig.DriverLogLevelsEntry\x1a\x65\n\x14\x44riverLogLevelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12<\n\x05value\x18\x02 \x01(\x0e\x32-.google.cloud.dataproc.v1.LoggingConfig.Level:\x02\x38\x01"p\n\x05Level\x12\x15\n\x11LEVEL_UNSPECIFIED\x10\x00\x12\x07\n\x03\x41LL\x10\x01\x12\t\n\x05TRACE\x10\x02\x12\t\n\x05\x44\x45\x42UG\x10\x03\x12\x08\n\x04INFO\x10\x04\x12\x08\n\x04WARN\x10\x05\x12\t\n\x05\x45RROR\x10\x06\x12\t\n\x05\x46\x41TAL\x10\x07\x12\x07\n\x03OFF\x10\x08"\xf1\x02\n\tHadoopJob\x12\x1b\n\x11main_jar_file_uri\x18\x01 \x01(\tH\x00\x12\x14\n\nmain_class\x18\x02 \x01(\tH\x00\x12\x11\n\x04\x61rgs\x18\x03 \x03(\tB\x03\xe0\x41\x01\x12\x1a\n\rjar_file_uris\x18\x04 \x03(\tB\x03\xe0\x41\x01\x12\x16\n\tfile_uris\x18\x05 \x03(\tB\x03\xe0\x41\x01\x12\x19\n\x0c\x61rchive_uris\x18\x06 \x03(\tB\x03\xe0\x41\x01\x12L\n\nproperties\x18\x07 \x03(\x0b\x32\x33.google.cloud.dataproc.v1.HadoopJob.PropertiesEntryB\x03\xe0\x41\x01\x12\x44\n\x0elogging_config\x18\x08 \x01(\x0b\x32\'.google.cloud.dataproc.v1.LoggingConfigB\x03\xe0\x41\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x08\n\x06\x64river"\xef\x02\n\x08SparkJob\x12\x1b\n\x11main_jar_file_uri\x18\x01 \x01(\tH\x00\x12\x14\n\nmain_class\x18\x02 \x01(\tH\x00\x12\x11\n\x04\x61rgs\x18\x03 \x03(\tB\x03\xe0\x41\x01\x12\x1a\n\rjar_file_uris\x18\x04 \x03(\tB\x03\xe0\x41\x01\x12\x16\n\tfile_uris\x18\x05 \x03(\tB\x03\xe0\x41\x01\x12\x19\n\x0c\x61rchive_uris\x18\x06 \x03(\tB\x03\xe0\x41\x01\x12K\n\nproperties\x18\x07 
\x03(\x0b\x32\x32.google.cloud.dataproc.v1.SparkJob.PropertiesEntryB\x03\xe0\x41\x01\x12\x44\n\x0elogging_config\x18\x08 \x01(\x0b\x32\'.google.cloud.dataproc.v1.LoggingConfigB\x03\xe0\x41\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x08\n\x06\x64river"\xf8\x02\n\nPySparkJob\x12!\n\x14main_python_file_uri\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x11\n\x04\x61rgs\x18\x02 \x03(\tB\x03\xe0\x41\x01\x12\x1d\n\x10python_file_uris\x18\x03 \x03(\tB\x03\xe0\x41\x01\x12\x1a\n\rjar_file_uris\x18\x04 \x03(\tB\x03\xe0\x41\x01\x12\x16\n\tfile_uris\x18\x05 \x03(\tB\x03\xe0\x41\x01\x12\x19\n\x0c\x61rchive_uris\x18\x06 \x03(\tB\x03\xe0\x41\x01\x12M\n\nproperties\x18\x07 \x03(\x0b\x32\x34.google.cloud.dataproc.v1.PySparkJob.PropertiesEntryB\x03\xe0\x41\x01\x12\x44\n\x0elogging_config\x18\x08 \x01(\x0b\x32\'.google.cloud.dataproc.v1.LoggingConfigB\x03\xe0\x41\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"!\n\tQueryList\x12\x14\n\x07queries\x18\x01 \x03(\tB\x03\xe0\x41\x02"\xb5\x03\n\x07HiveJob\x12\x18\n\x0equery_file_uri\x18\x01 \x01(\tH\x00\x12\x39\n\nquery_list\x18\x02 \x01(\x0b\x32#.google.cloud.dataproc.v1.QueryListH\x00\x12 \n\x13\x63ontinue_on_failure\x18\x03 \x01(\x08\x42\x03\xe0\x41\x01\x12U\n\x10script_variables\x18\x04 \x03(\x0b\x32\x36.google.cloud.dataproc.v1.HiveJob.ScriptVariablesEntryB\x03\xe0\x41\x01\x12J\n\nproperties\x18\x05 \x03(\x0b\x32\x31.google.cloud.dataproc.v1.HiveJob.PropertiesEntryB\x03\xe0\x41\x01\x12\x1a\n\rjar_file_uris\x18\x06 \x03(\tB\x03\xe0\x41\x01\x1a\x36\n\x14ScriptVariablesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07queries"\xe5\x03\n\x0bSparkSqlJob\x12\x18\n\x0equery_file_uri\x18\x01 \x01(\tH\x00\x12\x39\n\nquery_list\x18\x02 
\x01(\x0b\x32#.google.cloud.dataproc.v1.QueryListH\x00\x12Y\n\x10script_variables\x18\x03 \x03(\x0b\x32:.google.cloud.dataproc.v1.SparkSqlJob.ScriptVariablesEntryB\x03\xe0\x41\x01\x12N\n\nproperties\x18\x04 \x03(\x0b\x32\x35.google.cloud.dataproc.v1.SparkSqlJob.PropertiesEntryB\x03\xe0\x41\x01\x12\x1a\n\rjar_file_uris\x18\x38 \x03(\tB\x03\xe0\x41\x01\x12\x44\n\x0elogging_config\x18\x06 \x01(\x0b\x32\'.google.cloud.dataproc.v1.LoggingConfigB\x03\xe0\x41\x01\x1a\x36\n\x14ScriptVariablesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07queries"\xf8\x03\n\x06PigJob\x12\x18\n\x0equery_file_uri\x18\x01 \x01(\tH\x00\x12\x39\n\nquery_list\x18\x02 \x01(\x0b\x32#.google.cloud.dataproc.v1.QueryListH\x00\x12 \n\x13\x63ontinue_on_failure\x18\x03 \x01(\x08\x42\x03\xe0\x41\x01\x12T\n\x10script_variables\x18\x04 \x03(\x0b\x32\x35.google.cloud.dataproc.v1.PigJob.ScriptVariablesEntryB\x03\xe0\x41\x01\x12I\n\nproperties\x18\x05 \x03(\x0b\x32\x30.google.cloud.dataproc.v1.PigJob.PropertiesEntryB\x03\xe0\x41\x01\x12\x1a\n\rjar_file_uris\x18\x06 \x03(\tB\x03\xe0\x41\x01\x12\x44\n\x0elogging_config\x18\x07 \x01(\x0b\x32\'.google.cloud.dataproc.v1.LoggingConfigB\x03\xe0\x41\x01\x1a\x36\n\x14ScriptVariablesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07queries"D\n\x0cJobPlacement\x12\x19\n\x0c\x63luster_name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_uuid\x18\x02 \x01(\tB\x03\xe0\x41\x03"\xd9\x03\n\tJobStatus\x12=\n\x05state\x18\x01 \x01(\x0e\x32).google.cloud.dataproc.v1.JobStatus.StateB\x03\xe0\x41\x03\x12\x17\n\x07\x64\x65tails\x18\x02 \x01(\tB\x06\xe0\x41\x03\xe0\x41\x01\x12\x39\n\x10state_start_time\x18\x06 
\x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x43\n\x08substate\x18\x07 \x01(\x0e\x32,.google.cloud.dataproc.v1.JobStatus.SubstateB\x03\xe0\x41\x03"\xa9\x01\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07PENDING\x10\x01\x12\x0e\n\nSETUP_DONE\x10\x08\x12\x0b\n\x07RUNNING\x10\x02\x12\x12\n\x0e\x43\x41NCEL_PENDING\x10\x03\x12\x12\n\x0e\x43\x41NCEL_STARTED\x10\x07\x12\r\n\tCANCELLED\x10\x04\x12\x08\n\x04\x44ONE\x10\x05\x12\t\n\x05\x45RROR\x10\x06\x12\x13\n\x0f\x41TTEMPT_FAILURE\x10\t"H\n\x08Substate\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\r\n\tSUBMITTED\x10\x01\x12\n\n\x06QUEUED\x10\x02\x12\x10\n\x0cSTALE_STATUS\x10\x03"<\n\x0cJobReference\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06job_id\x18\x02 \x01(\tB\x03\xe0\x41\x01"\xa5\x02\n\x0fYarnApplication\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x43\n\x05state\x18\x02 \x01(\x0e\x32/.google.cloud.dataproc.v1.YarnApplication.StateB\x03\xe0\x41\x02\x12\x15\n\x08progress\x18\x03 \x01(\x02\x42\x03\xe0\x41\x02\x12\x19\n\x0ctracking_url\x18\x04 \x01(\tB\x03\xe0\x41\x01"\x87\x01\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x07\n\x03NEW\x10\x01\x12\x0e\n\nNEW_SAVING\x10\x02\x12\r\n\tSUBMITTED\x10\x03\x12\x0c\n\x08\x41\x43\x43\x45PTED\x10\x04\x12\x0b\n\x07RUNNING\x10\x05\x12\x0c\n\x08\x46INISHED\x10\x06\x12\n\n\x06\x46\x41ILED\x10\x07\x12\n\n\x06KILLED\x10\x08"\xcd\x07\n\x03Job\x12>\n\treference\x18\x01 \x01(\x0b\x32&.google.cloud.dataproc.v1.JobReferenceB\x03\xe0\x41\x01\x12>\n\tplacement\x18\x02 \x01(\x0b\x32&.google.cloud.dataproc.v1.JobPlacementB\x03\xe0\x41\x02\x12\x39\n\nhadoop_job\x18\x03 \x01(\x0b\x32#.google.cloud.dataproc.v1.HadoopJobH\x00\x12\x37\n\tspark_job\x18\x04 \x01(\x0b\x32".google.cloud.dataproc.v1.SparkJobH\x00\x12;\n\x0bpyspark_job\x18\x05 \x01(\x0b\x32$.google.cloud.dataproc.v1.PySparkJobH\x00\x12\x35\n\x08hive_job\x18\x06 \x01(\x0b\x32!.google.cloud.dataproc.v1.HiveJobH\x00\x12\x33\n\x07pig_job\x18\x07 \x01(\x0b\x32 
.google.cloud.dataproc.v1.PigJobH\x00\x12>\n\rspark_sql_job\x18\x0c \x01(\x0b\x32%.google.cloud.dataproc.v1.SparkSqlJobH\x00\x12\x38\n\x06status\x18\x08 \x01(\x0b\x32#.google.cloud.dataproc.v1.JobStatusB\x03\xe0\x41\x03\x12@\n\x0estatus_history\x18\r \x03(\x0b\x32#.google.cloud.dataproc.v1.JobStatusB\x03\xe0\x41\x03\x12I\n\x11yarn_applications\x18\t \x03(\x0b\x32).google.cloud.dataproc.v1.YarnApplicationB\x03\xe0\x41\x03\x12\'\n\x1a\x64river_output_resource_uri\x18\x11 \x01(\tB\x03\xe0\x41\x03\x12%\n\x18\x64river_control_files_uri\x18\x0f \x01(\tB\x03\xe0\x41\x03\x12>\n\x06labels\x18\x12 \x03(\x0b\x32).google.cloud.dataproc.v1.Job.LabelsEntryB\x03\xe0\x41\x01\x12@\n\nscheduling\x18\x14 \x01(\x0b\x32\'.google.cloud.dataproc.v1.JobSchedulingB\x03\xe0\x41\x01\x12\x15\n\x08job_uuid\x18\x16 \x01(\tB\x03\xe0\x41\x03\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\n\n\x08type_job"3\n\rJobScheduling\x12"\n\x15max_failures_per_hour\x18\x01 \x01(\x05\x42\x03\xe0\x41\x01"\x8a\x01\n\x10SubmitJobRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12/\n\x03job\x18\x02 \x01(\x0b\x32\x1d.google.cloud.dataproc.v1.JobB\x03\xe0\x41\x02\x12\x17\n\nrequest_id\x18\x04 \x01(\tB\x03\xe0\x41\x01"R\n\rGetJobRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06job_id\x18\x02 \x01(\tB\x03\xe0\x41\x02"\xb3\x02\n\x0fListJobsRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x06 \x01(\tB\x03\xe0\x41\x02\x12\x16\n\tpage_size\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01\x12\x17\n\npage_token\x18\x03 \x01(\tB\x03\xe0\x41\x01\x12\x19\n\x0c\x63luster_name\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12Y\n\x11job_state_matcher\x18\x05 \x01(\x0e\x32\x39.google.cloud.dataproc.v1.ListJobsRequest.JobStateMatcherB\x03\xe0\x41\x01\x12\x13\n\x06\x66ilter\x18\x07 
\x01(\tB\x03\xe0\x41\x01"6\n\x0fJobStateMatcher\x12\x07\n\x03\x41LL\x10\x00\x12\n\n\x06\x41\x43TIVE\x10\x01\x12\x0e\n\nNON_ACTIVE\x10\x02"\xbc\x01\n\x10UpdateJobRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06job_id\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12/\n\x03job\x18\x04 \x01(\x0b\x32\x1d.google.cloud.dataproc.v1.JobB\x03\xe0\x41\x02\x12\x34\n\x0bupdate_mask\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02"b\n\x10ListJobsResponse\x12\x30\n\x04jobs\x18\x01 \x03(\x0b\x32\x1d.google.cloud.dataproc.v1.JobB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x01"U\n\x10\x43\x61ncelJobRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06job_id\x18\x02 \x01(\tB\x03\xe0\x41\x02"U\n\x10\x44\x65leteJobRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06job_id\x18\x02 
\x01(\tB\x03\xe0\x41\x02\x32\x9b\t\n\rJobController\x12\xb1\x01\n\tSubmitJob\x12*.google.cloud.dataproc.v1.SubmitJobRequest\x1a\x1d.google.cloud.dataproc.v1.Job"Y\x82\xd3\xe4\x93\x02;"6/v1/projects/{project_id}/regions/{region}/jobs:submit:\x01*\xda\x41\x15project_id,region,job\x12\xad\x01\n\x06GetJob\x12\'.google.cloud.dataproc.v1.GetJobRequest\x1a\x1d.google.cloud.dataproc.v1.Job"[\x82\xd3\xe4\x93\x02:\x12\x38/v1/projects/{project_id}/regions/{region}/jobs/{job_id}\xda\x41\x18project_id,region,job_id\x12\xc9\x01\n\x08ListJobs\x12).google.cloud.dataproc.v1.ListJobsRequest\x1a*.google.cloud.dataproc.v1.ListJobsResponse"f\x82\xd3\xe4\x93\x02\x31\x12//v1/projects/{project_id}/regions/{region}/jobs\xda\x41\x11project_id,region\xda\x41\x18project_id,region,filter\x12\x9d\x01\n\tUpdateJob\x12*.google.cloud.dataproc.v1.UpdateJobRequest\x1a\x1d.google.cloud.dataproc.v1.Job"E\x82\xd3\xe4\x93\x02?28/v1/projects/{project_id}/regions/{region}/jobs/{job_id}:\x03job\x12\xbd\x01\n\tCancelJob\x12*.google.cloud.dataproc.v1.CancelJobRequest\x1a\x1d.google.cloud.dataproc.v1.Job"e\x82\xd3\xe4\x93\x02\x44"?/v1/projects/{project_id}/regions/{region}/jobs/{job_id}:cancel:\x01*\xda\x41\x18project_id,region,job_id\x12\xac\x01\n\tDeleteJob\x12*.google.cloud.dataproc.v1.DeleteJobRequest\x1a\x16.google.protobuf.Empty"[\x82\xd3\xe4\x93\x02:*8/v1/projects/{project_id}/regions/{region}/jobs/{job_id}\xda\x41\x18project_id,region,job_id\x1aK\xca\x41\x17\x64\x61taproc.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformBm\n\x1c\x63om.google.cloud.dataproc.v1B\tJobsProtoP\x01Z@google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataprocb\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, @@ -80,8 +84,8 @@ 
], containing_type=None, serialized_options=None, - serialized_start=407, - serialized_end=519, + serialized_start=465, + serialized_end=577, ) _sym_db.RegisterEnumDescriptor(_LOGGINGCONFIG_LEVEL) @@ -132,8 +136,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=3210, - serialized_end=3379, + serialized_start=3471, + serialized_end=3640, ) _sym_db.RegisterEnumDescriptor(_JOBSTATUS_STATE) @@ -158,8 +162,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=3381, - serialized_end=3453, + serialized_start=3642, + serialized_end=3714, ) _sym_db.RegisterEnumDescriptor(_JOBSTATUS_SUBSTATE) @@ -203,8 +207,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=3646, - serialized_end=3781, + serialized_start=3937, + serialized_end=4072, ) _sym_db.RegisterEnumDescriptor(_YARNAPPLICATION_STATE) @@ -226,8 +230,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=5165, - serialized_end=5219, + serialized_start=5582, + serialized_end=5636, ) _sym_db.RegisterEnumDescriptor(_LISTJOBSREQUEST_JOBSTATEMATCHER) @@ -284,8 +288,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=304, - serialized_end=405, + serialized_start=362, + serialized_end=463, ) _LOGGINGCONFIG = _descriptor.Descriptor( @@ -322,8 +326,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=198, - serialized_end=519, + serialized_start=256, + serialized_end=577, ) @@ -379,8 +383,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=802, - serialized_end=851, + serialized_start=890, + serialized_end=939, ) _HADOOPJOB = _descriptor.Descriptor( @@ -441,7 +445,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -459,7 +463,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + 
serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -477,7 +481,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -495,7 +499,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -513,7 +517,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -531,7 +535,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -551,8 +555,8 @@ fields=[], ) ], - serialized_start=522, - serialized_end=861, + serialized_start=580, + serialized_end=949, ) @@ -608,8 +612,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=802, - serialized_end=851, + serialized_start=890, + serialized_end=939, ) _SPARKJOB = _descriptor.Descriptor( @@ -670,7 +674,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -688,7 +692,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -706,7 +710,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -724,7 +728,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -742,7 +746,7 @@ 
containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -760,7 +764,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -780,8 +784,8 @@ fields=[], ) ], - serialized_start=864, - serialized_end=1201, + serialized_start=952, + serialized_end=1319, ) @@ -837,8 +841,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=802, - serialized_end=851, + serialized_start=890, + serialized_end=939, ) _PYSPARKJOB = _descriptor.Descriptor( @@ -863,7 +867,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -881,7 +885,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -899,7 +903,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -917,7 +921,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -935,7 +939,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -953,7 +957,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -971,7 +975,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + 
serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -989,7 +993,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -1001,8 +1005,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1204, - serialized_end=1540, + serialized_start=1322, + serialized_end=1698, ) @@ -1028,7 +1032,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ) ], @@ -1040,8 +1044,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1542, - serialized_end=1570, + serialized_start=1700, + serialized_end=1733, ) @@ -1097,8 +1101,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1874, - serialized_end=1928, + serialized_start=2057, + serialized_end=2111, ) _HIVEJOB_PROPERTIESENTRY = _descriptor.Descriptor( @@ -1153,8 +1157,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=802, - serialized_end=851, + serialized_start=890, + serialized_end=939, ) _HIVEJOB = _descriptor.Descriptor( @@ -1215,7 +1219,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1233,7 +1237,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1251,7 +1255,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1269,7 +1273,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -1289,8 +1293,8 @@ fields=[], ) ], 
- serialized_start=1573, - serialized_end=1990, + serialized_start=1736, + serialized_end=2173, ) @@ -1346,8 +1350,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1874, - serialized_end=1928, + serialized_start=2057, + serialized_end=2111, ) _SPARKSQLJOB_PROPERTIESENTRY = _descriptor.Descriptor( @@ -1402,8 +1406,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=802, - serialized_end=851, + serialized_start=890, + serialized_end=939, ) _SPARKSQLJOB = _descriptor.Descriptor( @@ -1464,7 +1468,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1482,7 +1486,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1500,7 +1504,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1518,7 +1522,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -1538,8 +1542,8 @@ fields=[], ) ], - serialized_start=1993, - serialized_end=2458, + serialized_start=2176, + serialized_end=2661, ) @@ -1595,8 +1599,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1874, - serialized_end=1928, + serialized_start=2057, + serialized_end=2111, ) _PIGJOB_PROPERTIESENTRY = _descriptor.Descriptor( @@ -1651,8 +1655,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=802, - serialized_end=851, + serialized_start=890, + serialized_end=939, ) _PIGJOB = _descriptor.Descriptor( @@ -1713,7 +1717,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + 
serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1731,7 +1735,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1749,7 +1753,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1767,7 +1771,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1785,7 +1789,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -1805,8 +1809,8 @@ fields=[], ) ], - serialized_start=2461, - serialized_end=2940, + serialized_start=2664, + serialized_end=3168, ) @@ -1832,7 +1836,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1850,7 +1854,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -1862,8 +1866,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2942, - serialized_end=3000, + serialized_start=3170, + serialized_end=3238, ) @@ -1889,7 +1893,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1907,7 +1911,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1925,7 +1929,7 @@ containing_type=None, 
is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1943,7 +1947,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -1955,8 +1959,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3003, - serialized_end=3453, + serialized_start=3241, + serialized_end=3714, ) @@ -1982,7 +1986,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2000,7 +2004,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -2012,8 +2016,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3455, - serialized_end=3505, + serialized_start=3716, + serialized_end=3776, ) @@ -2039,7 +2043,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2057,7 +2061,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2075,7 +2079,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2093,7 +2097,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -2105,8 +2109,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3508, - serialized_end=3781, + serialized_start=3779, + serialized_end=4072, ) @@ -2162,8 
+2166,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4650, - serialized_end=4695, + serialized_start=4991, + serialized_end=5036, ) _JOB = _descriptor.Descriptor( @@ -2188,7 +2192,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2206,7 +2210,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2332,7 +2336,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2350,7 +2354,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2368,7 +2372,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2386,7 +2390,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2404,7 +2408,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2422,7 +2426,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2440,7 +2444,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2458,7 
+2462,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -2478,8 +2482,8 @@ fields=[], ) ], - serialized_start=3784, - serialized_end=4707, + serialized_start=4075, + serialized_end=5048, ) @@ -2505,7 +2509,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ) ], @@ -2517,8 +2521,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4709, - serialized_end=4755, + serialized_start=5050, + serialized_end=5101, ) @@ -2544,7 +2548,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2562,7 +2566,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2580,7 +2584,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2598,7 +2602,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -2610,8 +2614,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4757, - serialized_end=4875, + serialized_start=5104, + serialized_end=5242, ) @@ -2637,7 +2641,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2655,7 +2659,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2673,7 
+2677,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -2685,8 +2689,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4877, - serialized_end=4944, + serialized_start=5244, + serialized_end=5326, ) @@ -2712,7 +2716,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2730,7 +2734,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2748,7 +2752,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2766,7 +2770,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2784,7 +2788,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2802,7 +2806,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2820,7 +2824,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -2832,8 +2836,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4947, - serialized_end=5219, + serialized_start=5329, + serialized_end=5636, ) @@ -2859,7 +2863,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + 
serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2877,7 +2881,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2895,7 +2899,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2913,7 +2917,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2931,7 +2935,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -2943,8 +2947,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5222, - serialized_end=5385, + serialized_start=5639, + serialized_end=5827, ) @@ -2970,7 +2974,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2988,7 +2992,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -3000,8 +3004,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5387, - serialized_end=5475, + serialized_start=5829, + serialized_end=5927, ) @@ -3027,7 +3031,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -3045,7 +3049,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -3063,7 +3067,7 @@ 
containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -3075,8 +3079,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5477, - serialized_end=5547, + serialized_start=5929, + serialized_end=6014, ) @@ -3102,7 +3106,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -3120,7 +3124,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -3138,7 +3142,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -3150,8 +3154,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5549, - serialized_end=5619, + serialized_start=6016, + serialized_end=6101, ) _LOGGINGCONFIG_DRIVERLOGLEVELSENTRY.fields_by_name[ @@ -3771,7 +3775,7 @@ state: Output only. A state message specifying the overall job state. details: - Output only. Optional job state details, such as an error + Optional. Output only. Job state details, such as an error description if the state is ERROR. state_start_time: Output only. The time when this state was entered. 
@@ -4156,24 +4160,110 @@ DESCRIPTOR._options = None _LOGGINGCONFIG_DRIVERLOGLEVELSENTRY._options = None _HADOOPJOB_PROPERTIESENTRY._options = None +_HADOOPJOB.fields_by_name["args"]._options = None +_HADOOPJOB.fields_by_name["jar_file_uris"]._options = None +_HADOOPJOB.fields_by_name["file_uris"]._options = None +_HADOOPJOB.fields_by_name["archive_uris"]._options = None +_HADOOPJOB.fields_by_name["properties"]._options = None +_HADOOPJOB.fields_by_name["logging_config"]._options = None _SPARKJOB_PROPERTIESENTRY._options = None +_SPARKJOB.fields_by_name["args"]._options = None +_SPARKJOB.fields_by_name["jar_file_uris"]._options = None +_SPARKJOB.fields_by_name["file_uris"]._options = None +_SPARKJOB.fields_by_name["archive_uris"]._options = None +_SPARKJOB.fields_by_name["properties"]._options = None +_SPARKJOB.fields_by_name["logging_config"]._options = None _PYSPARKJOB_PROPERTIESENTRY._options = None +_PYSPARKJOB.fields_by_name["main_python_file_uri"]._options = None +_PYSPARKJOB.fields_by_name["args"]._options = None +_PYSPARKJOB.fields_by_name["python_file_uris"]._options = None +_PYSPARKJOB.fields_by_name["jar_file_uris"]._options = None +_PYSPARKJOB.fields_by_name["file_uris"]._options = None +_PYSPARKJOB.fields_by_name["archive_uris"]._options = None +_PYSPARKJOB.fields_by_name["properties"]._options = None +_PYSPARKJOB.fields_by_name["logging_config"]._options = None +_QUERYLIST.fields_by_name["queries"]._options = None _HIVEJOB_SCRIPTVARIABLESENTRY._options = None _HIVEJOB_PROPERTIESENTRY._options = None +_HIVEJOB.fields_by_name["continue_on_failure"]._options = None +_HIVEJOB.fields_by_name["script_variables"]._options = None +_HIVEJOB.fields_by_name["properties"]._options = None +_HIVEJOB.fields_by_name["jar_file_uris"]._options = None _SPARKSQLJOB_SCRIPTVARIABLESENTRY._options = None _SPARKSQLJOB_PROPERTIESENTRY._options = None +_SPARKSQLJOB.fields_by_name["script_variables"]._options = None +_SPARKSQLJOB.fields_by_name["properties"]._options = None 
+_SPARKSQLJOB.fields_by_name["jar_file_uris"]._options = None +_SPARKSQLJOB.fields_by_name["logging_config"]._options = None _PIGJOB_SCRIPTVARIABLESENTRY._options = None _PIGJOB_PROPERTIESENTRY._options = None +_PIGJOB.fields_by_name["continue_on_failure"]._options = None +_PIGJOB.fields_by_name["script_variables"]._options = None +_PIGJOB.fields_by_name["properties"]._options = None +_PIGJOB.fields_by_name["jar_file_uris"]._options = None +_PIGJOB.fields_by_name["logging_config"]._options = None +_JOBPLACEMENT.fields_by_name["cluster_name"]._options = None +_JOBPLACEMENT.fields_by_name["cluster_uuid"]._options = None +_JOBSTATUS.fields_by_name["state"]._options = None +_JOBSTATUS.fields_by_name["details"]._options = None +_JOBSTATUS.fields_by_name["state_start_time"]._options = None +_JOBSTATUS.fields_by_name["substate"]._options = None +_JOBREFERENCE.fields_by_name["project_id"]._options = None +_JOBREFERENCE.fields_by_name["job_id"]._options = None +_YARNAPPLICATION.fields_by_name["name"]._options = None +_YARNAPPLICATION.fields_by_name["state"]._options = None +_YARNAPPLICATION.fields_by_name["progress"]._options = None +_YARNAPPLICATION.fields_by_name["tracking_url"]._options = None _JOB_LABELSENTRY._options = None +_JOB.fields_by_name["reference"]._options = None +_JOB.fields_by_name["placement"]._options = None +_JOB.fields_by_name["status"]._options = None +_JOB.fields_by_name["status_history"]._options = None +_JOB.fields_by_name["yarn_applications"]._options = None +_JOB.fields_by_name["driver_output_resource_uri"]._options = None +_JOB.fields_by_name["driver_control_files_uri"]._options = None +_JOB.fields_by_name["labels"]._options = None +_JOB.fields_by_name["scheduling"]._options = None +_JOB.fields_by_name["job_uuid"]._options = None +_JOBSCHEDULING.fields_by_name["max_failures_per_hour"]._options = None +_SUBMITJOBREQUEST.fields_by_name["project_id"]._options = None +_SUBMITJOBREQUEST.fields_by_name["region"]._options = None 
+_SUBMITJOBREQUEST.fields_by_name["job"]._options = None +_SUBMITJOBREQUEST.fields_by_name["request_id"]._options = None +_GETJOBREQUEST.fields_by_name["project_id"]._options = None +_GETJOBREQUEST.fields_by_name["region"]._options = None +_GETJOBREQUEST.fields_by_name["job_id"]._options = None +_LISTJOBSREQUEST.fields_by_name["project_id"]._options = None +_LISTJOBSREQUEST.fields_by_name["region"]._options = None +_LISTJOBSREQUEST.fields_by_name["page_size"]._options = None +_LISTJOBSREQUEST.fields_by_name["page_token"]._options = None +_LISTJOBSREQUEST.fields_by_name["cluster_name"]._options = None +_LISTJOBSREQUEST.fields_by_name["job_state_matcher"]._options = None +_LISTJOBSREQUEST.fields_by_name["filter"]._options = None +_UPDATEJOBREQUEST.fields_by_name["project_id"]._options = None +_UPDATEJOBREQUEST.fields_by_name["region"]._options = None +_UPDATEJOBREQUEST.fields_by_name["job_id"]._options = None +_UPDATEJOBREQUEST.fields_by_name["job"]._options = None +_UPDATEJOBREQUEST.fields_by_name["update_mask"]._options = None +_LISTJOBSRESPONSE.fields_by_name["jobs"]._options = None +_LISTJOBSRESPONSE.fields_by_name["next_page_token"]._options = None +_CANCELJOBREQUEST.fields_by_name["project_id"]._options = None +_CANCELJOBREQUEST.fields_by_name["region"]._options = None +_CANCELJOBREQUEST.fields_by_name["job_id"]._options = None +_DELETEJOBREQUEST.fields_by_name["project_id"]._options = None +_DELETEJOBREQUEST.fields_by_name["region"]._options = None +_DELETEJOBREQUEST.fields_by_name["job_id"]._options = None _JOBCONTROLLER = _descriptor.ServiceDescriptor( name="JobController", full_name="google.cloud.dataproc.v1.JobController", file=DESCRIPTOR, index=0, - serialized_options=None, - serialized_start=5622, - serialized_end=6572, + serialized_options=_b( + "\312A\027dataproc.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" + ), + serialized_start=6104, + serialized_end=7283, methods=[ _descriptor.MethodDescriptor( name="SubmitJob", @@ -4183,7 
+4273,7 @@ input_type=_SUBMITJOBREQUEST, output_type=_JOB, serialized_options=_b( - '\202\323\344\223\002;"6/v1/projects/{project_id}/regions/{region}/jobs:submit:\001*' + '\202\323\344\223\002;"6/v1/projects/{project_id}/regions/{region}/jobs:submit:\001*\332A\025project_id,region,job' ), ), _descriptor.MethodDescriptor( @@ -4194,7 +4284,7 @@ input_type=_GETJOBREQUEST, output_type=_JOB, serialized_options=_b( - "\202\323\344\223\002:\0228/v1/projects/{project_id}/regions/{region}/jobs/{job_id}" + "\202\323\344\223\002:\0228/v1/projects/{project_id}/regions/{region}/jobs/{job_id}\332A\030project_id,region,job_id" ), ), _descriptor.MethodDescriptor( @@ -4205,7 +4295,7 @@ input_type=_LISTJOBSREQUEST, output_type=_LISTJOBSRESPONSE, serialized_options=_b( - "\202\323\344\223\0021\022//v1/projects/{project_id}/regions/{region}/jobs" + "\202\323\344\223\0021\022//v1/projects/{project_id}/regions/{region}/jobs\332A\021project_id,region\332A\030project_id,region,filter" ), ), _descriptor.MethodDescriptor( @@ -4227,7 +4317,7 @@ input_type=_CANCELJOBREQUEST, output_type=_JOB, serialized_options=_b( - '\202\323\344\223\002D"?/v1/projects/{project_id}/regions/{region}/jobs/{job_id}:cancel:\001*' + '\202\323\344\223\002D"?/v1/projects/{project_id}/regions/{region}/jobs/{job_id}:cancel:\001*\332A\030project_id,region,job_id' ), ), _descriptor.MethodDescriptor( @@ -4238,7 +4328,7 @@ input_type=_DELETEJOBREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, serialized_options=_b( - "\202\323\344\223\002:*8/v1/projects/{project_id}/regions/{region}/jobs/{job_id}" + "\202\323\344\223\002:*8/v1/projects/{project_id}/regions/{region}/jobs/{job_id}\332A\030project_id,region,job_id" ), ), ], diff --git a/dataproc/google/cloud/dataproc_v1/proto/operations.proto b/dataproc/google/cloud/dataproc_v1/proto/operations.proto index c820cd8e65dc..4af2a5f80795 100644 --- a/dataproc/google/cloud/dataproc_v1/proto/operations.proto +++ 
b/dataproc/google/cloud/dataproc_v1/proto/operations.proto @@ -17,8 +17,9 @@ syntax = "proto3"; package google.cloud.dataproc.v1; -import "google/api/annotations.proto"; +import "google/api/field_behavior.proto"; import "google/protobuf/timestamp.proto"; +import "google/api/annotations.proto"; option go_package = "google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataproc"; option java_multiple_files = true; @@ -43,41 +44,41 @@ message ClusterOperationStatus { } // Output only. A message containing the operation state. - State state = 1; + State state = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. A message containing the detailed operation state. - string inner_state = 2; + string inner_state = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. A message containing any operation metadata details. - string details = 3; + string details = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The time this state was entered. - google.protobuf.Timestamp state_start_time = 4; + google.protobuf.Timestamp state_start_time = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; } // Metadata describing the operation. message ClusterOperationMetadata { // Output only. Name of the cluster for the operation. - string cluster_name = 7; + string cluster_name = 7 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Cluster UUID for the operation. - string cluster_uuid = 8; + string cluster_uuid = 8 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Current operation status. - ClusterOperationStatus status = 9; + ClusterOperationStatus status = 9 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The previous operation status. - repeated ClusterOperationStatus status_history = 10; + repeated ClusterOperationStatus status_history = 10 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The operation type. 
- string operation_type = 11; + string operation_type = 11 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Short description of operation. - string description = 12; + string description = 12 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Labels associated with the operation - map labels = 13; + map labels = 13 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Errors encountered during operation execution. - repeated string warnings = 14; + repeated string warnings = 14 [(google.api.field_behavior) = OUTPUT_ONLY]; } diff --git a/dataproc/google/cloud/dataproc_v1/proto/operations_pb2.py b/dataproc/google/cloud/dataproc_v1/proto/operations_pb2.py index 0f09da0c701d..f7fadd195d52 100644 --- a/dataproc/google/cloud/dataproc_v1/proto/operations_pb2.py +++ b/dataproc/google/cloud/dataproc_v1/proto/operations_pb2.py @@ -15,8 +15,9 @@ _sym_db = _symbol_database.Default() -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 DESCRIPTOR = _descriptor.FileDescriptor( @@ -27,11 +28,12 @@ "\n\034com.google.cloud.dataproc.v1B\017OperationsProtoP\001Z@google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataproc" ), serialized_pb=_b( - '\n/google/cloud/dataproc_v1/proto/operations.proto\x12\x18google.cloud.dataproc.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xf5\x01\n\x16\x43lusterOperationStatus\x12\x45\n\x05state\x18\x01 \x01(\x0e\x32\x36.google.cloud.dataproc.v1.ClusterOperationStatus.State\x12\x13\n\x0binner_state\x18\x02 \x01(\t\x12\x0f\n\x07\x64\x65tails\x18\x03 \x01(\t\x12\x34\n\x10state_start_time\x18\x04 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp"8\n\x05State\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0b\n\x07PENDING\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\x12\x08\n\x04\x44ONE\x10\x03"\x90\x03\n\x18\x43lusterOperationMetadata\x12\x14\n\x0c\x63luster_name\x18\x07 \x01(\t\x12\x14\n\x0c\x63luster_uuid\x18\x08 \x01(\t\x12@\n\x06status\x18\t \x01(\x0b\x32\x30.google.cloud.dataproc.v1.ClusterOperationStatus\x12H\n\x0estatus_history\x18\n \x03(\x0b\x32\x30.google.cloud.dataproc.v1.ClusterOperationStatus\x12\x16\n\x0eoperation_type\x18\x0b \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x0c \x01(\t\x12N\n\x06labels\x18\r \x03(\x0b\x32>.google.cloud.dataproc.v1.ClusterOperationMetadata.LabelsEntry\x12\x10\n\x08warnings\x18\x0e \x03(\t\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42s\n\x1c\x63om.google.cloud.dataproc.v1B\x0fOperationsProtoP\x01Z@google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataprocb\x06proto3' + '\n/google/cloud/dataproc_v1/proto/operations.proto\x12\x18google.cloud.dataproc.v1\x1a\x1fgoogle/api/field_behavior.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/api/annotations.proto"\x89\x02\n\x16\x43lusterOperationStatus\x12J\n\x05state\x18\x01 \x01(\x0e\x32\x36.google.cloud.dataproc.v1.ClusterOperationStatus.StateB\x03\xe0\x41\x03\x12\x18\n\x0binner_state\x18\x02 \x01(\tB\x03\xe0\x41\x03\x12\x14\n\x07\x64\x65tails\x18\x03 \x01(\tB\x03\xe0\x41\x03\x12\x39\n\x10state_start_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03"8\n\x05State\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0b\n\x07PENDING\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\x12\x08\n\x04\x44ONE\x10\x03"\xb8\x03\n\x18\x43lusterOperationMetadata\x12\x19\n\x0c\x63luster_name\x18\x07 \x01(\tB\x03\xe0\x41\x03\x12\x19\n\x0c\x63luster_uuid\x18\x08 \x01(\tB\x03\xe0\x41\x03\x12\x45\n\x06status\x18\t \x01(\x0b\x32\x30.google.cloud.dataproc.v1.ClusterOperationStatusB\x03\xe0\x41\x03\x12M\n\x0estatus_history\x18\n 
\x03(\x0b\x32\x30.google.cloud.dataproc.v1.ClusterOperationStatusB\x03\xe0\x41\x03\x12\x1b\n\x0eoperation_type\x18\x0b \x01(\tB\x03\xe0\x41\x03\x12\x18\n\x0b\x64\x65scription\x18\x0c \x01(\tB\x03\xe0\x41\x03\x12S\n\x06labels\x18\r \x03(\x0b\x32>.google.cloud.dataproc.v1.ClusterOperationMetadata.LabelsEntryB\x03\xe0\x41\x03\x12\x15\n\x08warnings\x18\x0e \x03(\tB\x03\xe0\x41\x03\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42s\n\x1c\x63om.google.cloud.dataproc.v1B\x0fOperationsProtoP\x01Z@google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataprocb\x06proto3' ), dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, + google_dot_api_dot_annotations__pb2.DESCRIPTOR, ], ) @@ -57,8 +59,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=330, - serialized_end=386, + serialized_start=383, + serialized_end=439, ) _sym_db.RegisterEnumDescriptor(_CLUSTEROPERATIONSTATUS_STATE) @@ -85,7 +87,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -103,7 +105,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -121,7 +123,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -139,7 +141,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -151,8 +153,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=141, - serialized_end=386, + serialized_start=174, + 
serialized_end=439, ) @@ -208,8 +210,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=744, - serialized_end=789, + serialized_start=837, + serialized_end=882, ) _CLUSTEROPERATIONMETADATA = _descriptor.Descriptor( @@ -234,7 +236,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -252,7 +254,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -270,7 +272,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -288,7 +290,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -306,7 +308,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -324,7 +326,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -342,7 +344,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -360,7 +362,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -372,8 +374,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=389, - serialized_end=789, + serialized_start=442, + serialized_end=882, ) _CLUSTEROPERATIONSTATUS.fields_by_name[ 
@@ -467,5 +469,17 @@ DESCRIPTOR._options = None +_CLUSTEROPERATIONSTATUS.fields_by_name["state"]._options = None +_CLUSTEROPERATIONSTATUS.fields_by_name["inner_state"]._options = None +_CLUSTEROPERATIONSTATUS.fields_by_name["details"]._options = None +_CLUSTEROPERATIONSTATUS.fields_by_name["state_start_time"]._options = None _CLUSTEROPERATIONMETADATA_LABELSENTRY._options = None +_CLUSTEROPERATIONMETADATA.fields_by_name["cluster_name"]._options = None +_CLUSTEROPERATIONMETADATA.fields_by_name["cluster_uuid"]._options = None +_CLUSTEROPERATIONMETADATA.fields_by_name["status"]._options = None +_CLUSTEROPERATIONMETADATA.fields_by_name["status_history"]._options = None +_CLUSTEROPERATIONMETADATA.fields_by_name["operation_type"]._options = None +_CLUSTEROPERATIONMETADATA.fields_by_name["description"]._options = None +_CLUSTEROPERATIONMETADATA.fields_by_name["labels"]._options = None +_CLUSTEROPERATIONMETADATA.fields_by_name["warnings"]._options = None # @@protoc_insertion_point(module_scope) diff --git a/dataproc/google/cloud/dataproc_v1/proto/workflow_templates.proto b/dataproc/google/cloud/dataproc_v1/proto/workflow_templates.proto index 61295a5500dc..8976c42e29a0 100644 --- a/dataproc/google/cloud/dataproc_v1/proto/workflow_templates.proto +++ b/dataproc/google/cloud/dataproc_v1/proto/workflow_templates.proto @@ -18,6 +18,9 @@ syntax = "proto3"; package google.cloud.dataproc.v1; import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; import "google/cloud/dataproc/v1/clusters.proto"; import "google/cloud/dataproc/v1/jobs.proto"; import "google/longrunning/operations.proto"; @@ -32,6 +35,9 @@ option java_package = "com.google.cloud.dataproc.v1"; // The API interface for managing Workflow Templates in the // Cloud Dataproc API. 
service WorkflowTemplateService { + option (google.api.default_host) = "dataproc.googleapis.com"; + option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; + // Creates new workflow template. rpc CreateWorkflowTemplate(CreateWorkflowTemplateRequest) returns (WorkflowTemplate) { option (google.api.http) = { @@ -42,6 +48,7 @@ service WorkflowTemplateService { body: "template" } }; + option (google.api.method_signature) = "parent,template"; } // Retrieves the latest workflow template. @@ -55,6 +62,7 @@ service WorkflowTemplateService { get: "/v1/{name=projects/*/regions/*/workflowTemplates/*}" } }; + option (google.api.method_signature) = "name"; } // Instantiates a template and begins execution. @@ -70,7 +78,9 @@ service WorkflowTemplateService { // clusters to be deleted. // // The [Operation.metadata][google.longrunning.Operation.metadata] will be - // [WorkflowMetadata][google.cloud.dataproc.v1.WorkflowMetadata]. + // [WorkflowMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#workflowmetadata). + // Also see [Using + // WorkflowMetadata](/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata). // // On successful completion, // [Operation.response][google.longrunning.Operation.response] will be @@ -84,6 +94,12 @@ service WorkflowTemplateService { body: "*" } }; + option (google.api.method_signature) = "name"; + option (google.api.method_signature) = "name,parameters"; + option (google.longrunning.operation_info) = { + response_type: "google.protobuf.Empty" + metadata_type: "WorkflowMetadata" + }; } // Instantiates a template and begins execution. @@ -103,7 +119,9 @@ service WorkflowTemplateService { // clusters to be deleted. // // The [Operation.metadata][google.longrunning.Operation.metadata] will be - // [WorkflowMetadata][google.cloud.dataproc.v1.WorkflowMetadata]. + // [WorkflowMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#workflowmetadata). 
+ // Also see [Using + // WorkflowMetadata](/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata). // // On successful completion, // [Operation.response][google.longrunning.Operation.response] will be @@ -117,6 +135,11 @@ service WorkflowTemplateService { body: "template" } }; + option (google.api.method_signature) = "parent,template"; + option (google.longrunning.operation_info) = { + response_type: "google.protobuf.Empty" + metadata_type: "WorkflowMetadata" + }; } // Updates (replaces) workflow template. The updated template @@ -130,6 +153,7 @@ service WorkflowTemplateService { body: "template" } }; + option (google.api.method_signature) = "template"; } // Lists workflows that match the specified filter in the request. @@ -140,6 +164,7 @@ service WorkflowTemplateService { get: "/v1/{parent=projects/*/regions/*}/workflowTemplates" } }; + option (google.api.method_signature) = "parent"; } // Deletes a workflow template. It does not cancel in-progress workflows. @@ -150,22 +175,32 @@ service WorkflowTemplateService { delete: "/v1/{name=projects/*/regions/*/workflowTemplates/*}" } }; + option (google.api.method_signature) = "name"; } } // A Cloud Dataproc workflow template resource. message WorkflowTemplate { - // Required. The template id. - // - // The id must contain only letters (a-z, A-Z), numbers (0-9), - // underscores (_), and hyphens (-). Cannot begin or end with underscore - // or hyphen. Must consist of between 3 and 50 characters. - string id = 2; + option (google.api.resource) = { + type: "dataproc.googleapis.com/WorkflowTemplate" + pattern: "projects/{project}/regions/{region}/workflowTemplates/{workflow_template}" + pattern: "projects/{project}/locations/{location}/workflowTemplates/{workflow_template}" + history: ORIGINALLY_SINGLE_PATTERN + }; + + string id = 2 [(google.api.field_behavior) = REQUIRED]; - // Output only. 
The "resource name" of the template, as described - // in https://cloud.google.com/apis/design/resource_names of the form - // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` - string name = 1; + // Output only. The resource name of the workflow template, as described + // in https://cloud.google.com/apis/design/resource_names. + // + // * For `projects.regions.workflowTemplates`, the resource name of the + // template has the following format: + // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` + // + // * For `projects.locations.workflowTemplates`, the resource name of the + // template has the following format: + // `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}` + string name = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; // Optional. Used to perform a consistent read-modify-write. // @@ -176,13 +211,13 @@ message WorkflowTemplate { // the current template with the `version` field filled in with the // current server version. The user updates other fields in the template, // then returns it as part of the `UpdateWorkflowTemplate` request. - int32 version = 3; + int32 version = 3 [(google.api.field_behavior) = OPTIONAL]; // Output only. The time template was created. - google.protobuf.Timestamp create_time = 4; + google.protobuf.Timestamp create_time = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The time template was last updated. - google.protobuf.Timestamp update_time = 5; + google.protobuf.Timestamp update_time = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; // Optional. The labels to associate with this template. These labels // will be propagated to all jobs and clusters created by the workflow @@ -196,18 +231,18 @@ message WorkflowTemplate { // [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt). // // No more than 32 labels can be associated with a template. - map labels = 6; + map labels = 6 [(google.api.field_behavior) = OPTIONAL]; // Required. 
WorkflowTemplate scheduling information. - WorkflowTemplatePlacement placement = 7; + WorkflowTemplatePlacement placement = 7 [(google.api.field_behavior) = REQUIRED]; // Required. The Directed Acyclic Graph of Jobs to submit. - repeated OrderedJob jobs = 8; + repeated OrderedJob jobs = 8 [(google.api.field_behavior) = REQUIRED]; - // Optional. Template parameters whose values are substituted into the + // Optional. emplate parameters whose values are substituted into the // template. Values for parameters must be provided when the template is // instantiated. - repeated TemplateParameter parameters = 9; + repeated TemplateParameter parameters = 9 [(google.api.field_behavior) = OPTIONAL]; } // Specifies workflow execution target. @@ -217,7 +252,7 @@ message WorkflowTemplatePlacement { // Required. Specifies where workflow executes; either on a managed // cluster or an existing cluster chosen by labels. oneof placement { - // Optional. A cluster that is managed by the workflow. + // A cluster that is managed by the workflow. ManagedCluster managed_cluster = 1; // Optional. A selector that chooses target cluster for jobs based @@ -236,10 +271,10 @@ message ManagedCluster { // The name must contain only lower-case letters (a-z), numbers (0-9), // and hyphens (-). Must begin with a letter. Cannot begin or end with // hyphen. Must consist of between 2 and 35 characters. - string cluster_name = 2; + string cluster_name = 2 [(google.api.field_behavior) = REQUIRED]; // Required. The cluster configuration. - ClusterConfig config = 3; + ClusterConfig config = 3 [(google.api.field_behavior) = REQUIRED]; // Optional. The labels to associate with this cluster. // @@ -251,7 +286,7 @@ message ManagedCluster { // the following PCRE regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63} // // No more than 32 labels can be associated with a given cluster. 
- map labels = 4; + map labels = 4 [(google.api.field_behavior) = OPTIONAL]; } // A selector that chooses target cluster for jobs based on metadata. @@ -261,11 +296,11 @@ message ClusterSelector { // // If unspecified, the zone of the first cluster matching the selector // is used. - string zone = 1; + string zone = 1 [(google.api.field_behavior) = OPTIONAL]; // Required. The cluster labels. Cluster must have all labels // to match. - map cluster_labels = 2; + map cluster_labels = 2 [(google.api.field_behavior) = REQUIRED]; } // A job executed by the workflow. @@ -281,7 +316,7 @@ message OrderedJob { // The id must contain only letters (a-z, A-Z), numbers (0-9), // underscores (_), and hyphens (-). Cannot begin or end with underscore // or hyphen. Must consist of between 3 and 50 characters. - string step_id = 1; + string step_id = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The job definition. oneof job_type { @@ -314,14 +349,14 @@ message OrderedJob { // the following regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63} // // No more than 32 labels can be associated with a given job. - map labels = 8; + map labels = 8 [(google.api.field_behavior) = OPTIONAL]; // Optional. Job scheduling configuration. - JobScheduling scheduling = 9; + JobScheduling scheduling = 9 [(google.api.field_behavior) = OPTIONAL]; // Optional. The optional list of prerequisite job step_ids. // If not specified, the job will start at the beginning of workflow. - repeated string prerequisite_step_ids = 10; + repeated string prerequisite_step_ids = 10 [(google.api.field_behavior) = OPTIONAL]; } // A configurable parameter that replaces one or more fields in the template. @@ -334,14 +369,14 @@ message OrderedJob { // - Main class (in HadoopJob and SparkJob) // - Zone (in ClusterSelector) message TemplateParameter { - // Required. Parameter name. + // Required. Parameter name. 
// The parameter name is used as the key, and paired with the // parameter value, which are passed to the template when the template // is instantiated. // The name must contain only capital letters (A-Z), numbers (0-9), and // underscores (_), and must not start with a number. The maximum length is // 40 characters. - string name = 1; + string name = 1 [(google.api.field_behavior) = REQUIRED]; // Required. Paths to all fields that the parameter replaces. // A field is allowed to appear in at most one parameter's list of field @@ -387,14 +422,14 @@ message TemplateParameter { // // - placement.clusterSelector.clusterLabels // - jobs['step-id'].sparkJob.args - repeated string fields = 2; + repeated string fields = 2 [(google.api.field_behavior) = REQUIRED]; // Optional. Brief description of the parameter. // Must not exceed 1024 characters. - string description = 3; + string description = 3 [(google.api.field_behavior) = OPTIONAL]; // Optional. Validation rules to be applied to this parameter's value. - ParameterValidation validation = 4; + ParameterValidation validation = 4 [(google.api.field_behavior) = OPTIONAL]; } // Configuration for parameter validation. @@ -414,13 +449,13 @@ message RegexValidation { // Required. RE2 regular expressions used to validate the parameter's value. // The value must match the regex in its entirety (substring // matches are not sufficient). - repeated string regexes = 1; + repeated string regexes = 1 [(google.api.field_behavior) = REQUIRED]; } // Validation based on a list of allowed values. message ValueValidation { // Required. List of allowed values for the parameter. - repeated string values = 1; + repeated string values = 1 [(google.api.field_behavior) = REQUIRED]; } // A Cloud Dataproc workflow template resource. @@ -440,57 +475,66 @@ message WorkflowMetadata { DONE = 3; } - // Output only. The "resource name" of the template. - string template = 1; + // Output only. 
The resource name of the workflow template as described + // in https://cloud.google.com/apis/design/resource_names. + // + // * For `projects.regions.workflowTemplates`, the resource name of the + // template has the following format: + // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` + // + // * For `projects.locations.workflowTemplates`, the resource name of the + // template has the following format: + // `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}` + string template = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The version of template at the time of // workflow instantiation. - int32 version = 2; + int32 version = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The create cluster operation metadata. - ClusterOperation create_cluster = 3; + ClusterOperation create_cluster = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The workflow graph. - WorkflowGraph graph = 4; + WorkflowGraph graph = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The delete cluster operation metadata. - ClusterOperation delete_cluster = 5; + ClusterOperation delete_cluster = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The workflow state. - State state = 6; + State state = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The name of the target cluster. - string cluster_name = 7; + string cluster_name = 7 [(google.api.field_behavior) = OUTPUT_ONLY]; // Map from parameter names to values that were used for those parameters. map parameters = 8; // Output only. Workflow start time. - google.protobuf.Timestamp start_time = 9; + google.protobuf.Timestamp start_time = 9 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Workflow end time. - google.protobuf.Timestamp end_time = 10; + google.protobuf.Timestamp end_time = 10 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The UUID of target cluster. 
- string cluster_uuid = 11; + string cluster_uuid = 11 [(google.api.field_behavior) = OUTPUT_ONLY]; } // The cluster operation triggered by a workflow. message ClusterOperation { // Output only. The id of the cluster operation. - string operation_id = 1; + string operation_id = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Error, if operation failed. - string error = 2; + string error = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Indicates the operation is done. - bool done = 3; + bool done = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; } // The workflow graph. message WorkflowGraph { // Output only. The workflow nodes. - repeated WorkflowNode nodes = 1; + repeated WorkflowNode nodes = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; } // The workflow node. @@ -518,52 +562,88 @@ message WorkflowNode { } // Output only. The name of the node. - string step_id = 1; + string step_id = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Node's prerequisite nodes. - repeated string prerequisite_step_ids = 2; + repeated string prerequisite_step_ids = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The job id; populated after the node enters RUNNING state. - string job_id = 3; + string job_id = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The node state. - NodeState state = 5; + NodeState state = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The error detail. - string error = 6; + string error = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; } // A request to create a workflow template. message CreateWorkflowTemplateRequest { - // Required. The "resource name" of the region, as described - // in https://cloud.google.com/apis/design/resource_names of the form - // `projects/{project_id}/regions/{region}` - string parent = 1; + // Required. The resource name of the region or location, as described + // in https://cloud.google.com/apis/design/resource_names. 
+ // + // * For `projects.regions.workflowTemplates,create`, the resource name of the + // region has the following format: + // `projects/{project_id}/regions/{region}` + // + // * For `projects.locations.workflowTemplates.create`, the resource name of + // the location has the following format: + // `projects/{project_id}/locations/{location}` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "dataproc.googleapis.com/WorkflowTemplate" + } + ]; // Required. The Dataproc workflow template to create. - WorkflowTemplate template = 2; + WorkflowTemplate template = 2 [(google.api.field_behavior) = REQUIRED]; } // A request to fetch a workflow template. message GetWorkflowTemplateRequest { - // Required. The "resource name" of the workflow template, as described - // in https://cloud.google.com/apis/design/resource_names of the form - // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` - string name = 1; + // Required. The resource name of the workflow template, as described + // in https://cloud.google.com/apis/design/resource_names. + // + // * For `projects.regions.workflowTemplates.get`, the resource name of the + // template has the following format: + // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` + // + // * For `projects.locations.workflowTemplates.get`, the resource name of the + // template has the following format: + // `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "dataproc.googleapis.com/WorkflowTemplate" + } + ]; // Optional. The version of workflow template to retrieve. Only previously - // instatiated versions can be retrieved. + // instantiated versions can be retrieved. // // If unspecified, retrieves the current version. 
- int32 version = 2; + int32 version = 2 [(google.api.field_behavior) = OPTIONAL]; } // A request to instantiate a workflow template. message InstantiateWorkflowTemplateRequest { - // Required. The "resource name" of the workflow template, as described - // in https://cloud.google.com/apis/design/resource_names of the form - // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` - string name = 1; + // Required. The resource name of the workflow template, as described + // in https://cloud.google.com/apis/design/resource_names. + // + // * For `projects.regions.workflowTemplates.instantiate`, the resource name + // of the template has the following format: + // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` + // + // * For `projects.locations.workflowTemplates.instantiate`, the resource name + // of the template has the following format: + // `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "dataproc.googleapis.com/WorkflowTemplate" + } + ]; // Optional. The version of workflow template to instantiate. If specified, // the workflow will be instantiated only if the current version of @@ -571,7 +651,7 @@ message InstantiateWorkflowTemplateRequest { // // This option cannot be used to instantiate a previous version of // workflow template. - int32 version = 2; + int32 version = 2 [(google.api.field_behavior) = OPTIONAL]; // Optional. A tag that prevents multiple concurrent workflow // instances with the same tag from running. This mitigates risk of @@ -582,22 +662,34 @@ message InstantiateWorkflowTemplateRequest { // // The tag must contain only letters (a-z, A-Z), numbers (0-9), // underscores (_), and hyphens (-). The maximum length is 40 characters. - string request_id = 5; + string request_id = 5 [(google.api.field_behavior) = OPTIONAL]; // Optional. 
Map from parameter names to values that should be used for those // parameters. Values may not exceed 100 characters. - map parameters = 6; + map parameters = 6 [(google.api.field_behavior) = OPTIONAL]; } // A request to instantiate an inline workflow template. message InstantiateInlineWorkflowTemplateRequest { - // Required. The "resource name" of the workflow template region, as described - // in https://cloud.google.com/apis/design/resource_names of the form - // `projects/{project_id}/regions/{region}` - string parent = 1; + // Required. The resource name of the region or location, as described + // in https://cloud.google.com/apis/design/resource_names. + // + // * For `projects.regions.workflowTemplates,instantiateinline`, the resource + // name of the region has the following format: + // `projects/{project_id}/regions/{region}` + // + // * For `projects.locations.workflowTemplates.instantiateinline`, the + // resource name of the location has the following format: + // `projects/{project_id}/locations/{location}` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "dataproc.googleapis.com/WorkflowTemplate" + } + ]; // Required. The workflow template to instantiate. - WorkflowTemplate template = 2; + WorkflowTemplate template = 2 [(google.api.field_behavior) = REQUIRED]; // Optional. A tag that prevents multiple concurrent workflow // instances with the same tag from running. This mitigates risk of @@ -608,7 +700,7 @@ message InstantiateInlineWorkflowTemplateRequest { // // The tag must contain only letters (a-z, A-Z), numbers (0-9), // underscores (_), and hyphens (-). The maximum length is 40 characters. - string request_id = 3; + string request_id = 3 [(google.api.field_behavior) = OPTIONAL]; } // A request to update a workflow template. @@ -616,46 +708,75 @@ message UpdateWorkflowTemplateRequest { // Required. The updated workflow template. 
// // The `template.version` field must match the current version. - WorkflowTemplate template = 1; + WorkflowTemplate template = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "dataproc.googleapis.com/WorkflowTemplate" + } + ]; } // A request to list workflow templates in a project. message ListWorkflowTemplatesRequest { - // Required. The "resource name" of the region, as described - // in https://cloud.google.com/apis/design/resource_names of the form - // `projects/{project_id}/regions/{region}` - string parent = 1; + // Required. The resource name of the region or location, as described + // in https://cloud.google.com/apis/design/resource_names. + // + // * For `projects.regions.workflowTemplates,list`, the resource + // name of the region has the following format: + // `projects/{project_id}/regions/{region}` + // + // * For `projects.locations.workflowTemplates.list`, the + // resource name of the location has the following format: + // `projects/{project_id}/locations/{location}` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "dataproc.googleapis.com/WorkflowTemplate" + } + ]; // Optional. The maximum number of results to return in each response. - int32 page_size = 2; + int32 page_size = 2 [(google.api.field_behavior) = OPTIONAL]; // Optional. The page token, returned by a previous call, to request the // next page of results. - string page_token = 3; + string page_token = 3 [(google.api.field_behavior) = OPTIONAL]; } // A response to a request to list workflow templates in a project. message ListWorkflowTemplatesResponse { // Output only. WorkflowTemplates list. - repeated WorkflowTemplate templates = 1; + repeated WorkflowTemplate templates = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. This token is included in the response if there are more // results to fetch. 
To fetch additional results, provide this value as the // page_token in a subsequent ListWorkflowTemplatesRequest. - string next_page_token = 2; + string next_page_token = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; } // A request to delete a workflow template. // // Currently started workflows will remain running. message DeleteWorkflowTemplateRequest { - // Required. The "resource name" of the workflow template, as described - // in https://cloud.google.com/apis/design/resource_names of the form - // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` - string name = 1; + // Required. The resource name of the workflow template, as described + // in https://cloud.google.com/apis/design/resource_names. + // + // * For `projects.regions.workflowTemplates.delete`, the resource name + // of the template has the following format: + // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` + // + // * For `projects.locations.workflowTemplates.instantiate`, the resource name + // of the template has the following format: + // `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "dataproc.googleapis.com/WorkflowTemplate" + } + ]; // Optional. The version of workflow template to delete. If specified, // will only delete the template if the current server version matches // specified version. 
- int32 version = 2; + int32 version = 2 [(google.api.field_behavior) = OPTIONAL]; } diff --git a/dataproc/google/cloud/dataproc_v1/proto/workflow_templates_pb2.py b/dataproc/google/cloud/dataproc_v1/proto/workflow_templates_pb2.py index a8f2903a1152..e539c2c176c2 100644 --- a/dataproc/google/cloud/dataproc_v1/proto/workflow_templates_pb2.py +++ b/dataproc/google/cloud/dataproc_v1/proto/workflow_templates_pb2.py @@ -16,6 +16,9 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.cloud.dataproc_v1.proto import ( clusters_pb2 as google_dot_cloud_dot_dataproc__v1_dot_proto_dot_clusters__pb2, ) @@ -37,10 +40,13 @@ "\n\034com.google.cloud.dataproc.v1B\026WorkflowTemplatesProtoP\001Z@google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataproc" ), serialized_pb=_b( - '\n7google/cloud/dataproc_v1/proto/workflow_templates.proto\x12\x18google.cloud.dataproc.v1\x1a\x1cgoogle/api/annotations.proto\x1a-google/cloud/dataproc_v1/proto/clusters.proto\x1a)google/cloud/dataproc_v1/proto/jobs.proto\x1a#google/longrunning/operations.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xd3\x03\n\x10WorkflowTemplate\x12\n\n\x02id\x18\x02 \x01(\t\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x03 \x01(\x05\x12/\n\x0b\x63reate_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x46\n\x06labels\x18\x06 \x03(\x0b\x32\x36.google.cloud.dataproc.v1.WorkflowTemplate.LabelsEntry\x12\x46\n\tplacement\x18\x07 \x01(\x0b\x32\x33.google.cloud.dataproc.v1.WorkflowTemplatePlacement\x12\x32\n\x04jobs\x18\x08 \x03(\x0b\x32$.google.cloud.dataproc.v1.OrderedJob\x12?\n\nparameters\x18\t 
\x03(\x0b\x32+.google.cloud.dataproc.v1.TemplateParameter\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xb4\x01\n\x19WorkflowTemplatePlacement\x12\x43\n\x0fmanaged_cluster\x18\x01 \x01(\x0b\x32(.google.cloud.dataproc.v1.ManagedClusterH\x00\x12\x45\n\x10\x63luster_selector\x18\x02 \x01(\x0b\x32).google.cloud.dataproc.v1.ClusterSelectorH\x00\x42\x0b\n\tplacement"\xd4\x01\n\x0eManagedCluster\x12\x14\n\x0c\x63luster_name\x18\x02 \x01(\t\x12\x37\n\x06\x63onfig\x18\x03 \x01(\x0b\x32\'.google.cloud.dataproc.v1.ClusterConfig\x12\x44\n\x06labels\x18\x04 \x03(\x0b\x32\x34.google.cloud.dataproc.v1.ManagedCluster.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xab\x01\n\x0f\x43lusterSelector\x12\x0c\n\x04zone\x18\x01 \x01(\t\x12T\n\x0e\x63luster_labels\x18\x02 \x03(\x0b\x32<.google.cloud.dataproc.v1.ClusterSelector.ClusterLabelsEntry\x1a\x34\n\x12\x43lusterLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xd3\x04\n\nOrderedJob\x12\x0f\n\x07step_id\x18\x01 \x01(\t\x12\x39\n\nhadoop_job\x18\x02 \x01(\x0b\x32#.google.cloud.dataproc.v1.HadoopJobH\x00\x12\x37\n\tspark_job\x18\x03 \x01(\x0b\x32".google.cloud.dataproc.v1.SparkJobH\x00\x12;\n\x0bpyspark_job\x18\x04 \x01(\x0b\x32$.google.cloud.dataproc.v1.PySparkJobH\x00\x12\x35\n\x08hive_job\x18\x05 \x01(\x0b\x32!.google.cloud.dataproc.v1.HiveJobH\x00\x12\x33\n\x07pig_job\x18\x06 \x01(\x0b\x32 .google.cloud.dataproc.v1.PigJobH\x00\x12>\n\rspark_sql_job\x18\x07 \x01(\x0b\x32%.google.cloud.dataproc.v1.SparkSqlJobH\x00\x12@\n\x06labels\x18\x08 \x03(\x0b\x32\x30.google.cloud.dataproc.v1.OrderedJob.LabelsEntry\x12;\n\nscheduling\x18\t \x01(\x0b\x32\'.google.cloud.dataproc.v1.JobScheduling\x12\x1d\n\x15prerequisite_step_ids\x18\n \x03(\t\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01\x42\n\n\x08job_type"\x89\x01\n\x11TemplateParameter\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06\x66ields\x18\x02 \x03(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12\x41\n\nvalidation\x18\x04 \x01(\x0b\x32-.google.cloud.dataproc.v1.ParameterValidation"\xa1\x01\n\x13ParameterValidation\x12:\n\x05regex\x18\x01 \x01(\x0b\x32).google.cloud.dataproc.v1.RegexValidationH\x00\x12;\n\x06values\x18\x02 \x01(\x0b\x32).google.cloud.dataproc.v1.ValueValidationH\x00\x42\x11\n\x0fvalidation_type""\n\x0fRegexValidation\x12\x0f\n\x07regexes\x18\x01 \x03(\t"!\n\x0fValueValidation\x12\x0e\n\x06values\x18\x01 \x03(\t"\xfd\x04\n\x10WorkflowMetadata\x12\x10\n\x08template\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\x05\x12\x42\n\x0e\x63reate_cluster\x18\x03 \x01(\x0b\x32*.google.cloud.dataproc.v1.ClusterOperation\x12\x36\n\x05graph\x18\x04 \x01(\x0b\x32\'.google.cloud.dataproc.v1.WorkflowGraph\x12\x42\n\x0e\x64\x65lete_cluster\x18\x05 \x01(\x0b\x32*.google.cloud.dataproc.v1.ClusterOperation\x12?\n\x05state\x18\x06 \x01(\x0e\x32\x30.google.cloud.dataproc.v1.WorkflowMetadata.State\x12\x14\n\x0c\x63luster_name\x18\x07 \x01(\t\x12N\n\nparameters\x18\x08 \x03(\x0b\x32:.google.cloud.dataproc.v1.WorkflowMetadata.ParametersEntry\x12.\n\nstart_time\x18\t \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x14\n\x0c\x63luster_uuid\x18\x0b \x01(\t\x1a\x31\n\x0fParametersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"8\n\x05State\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0b\n\x07PENDING\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\x12\x08\n\x04\x44ONE\x10\x03"E\n\x10\x43lusterOperation\x12\x14\n\x0coperation_id\x18\x01 \x01(\t\x12\r\n\x05\x65rror\x18\x02 \x01(\t\x12\x0c\n\x04\x64one\x18\x03 \x01(\x08"F\n\rWorkflowGraph\x12\x35\n\x05nodes\x18\x01 \x03(\x0b\x32&.google.cloud.dataproc.v1.WorkflowNode"\x8a\x02\n\x0cWorkflowNode\x12\x0f\n\x07step_id\x18\x01 
\x01(\t\x12\x1d\n\x15prerequisite_step_ids\x18\x02 \x03(\t\x12\x0e\n\x06job_id\x18\x03 \x01(\t\x12?\n\x05state\x18\x05 \x01(\x0e\x32\x30.google.cloud.dataproc.v1.WorkflowNode.NodeState\x12\r\n\x05\x65rror\x18\x06 \x01(\t"j\n\tNodeState\x12\x1a\n\x16NODE_STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07\x42LOCKED\x10\x01\x12\x0c\n\x08RUNNABLE\x10\x02\x12\x0b\n\x07RUNNING\x10\x03\x12\r\n\tCOMPLETED\x10\x04\x12\n\n\x06\x46\x41ILED\x10\x05"m\n\x1d\x43reateWorkflowTemplateRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12<\n\x08template\x18\x02 \x01(\x0b\x32*.google.cloud.dataproc.v1.WorkflowTemplate";\n\x1aGetWorkflowTemplateRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\x05"\xec\x01\n"InstantiateWorkflowTemplateRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\x05\x12\x12\n\nrequest_id\x18\x05 \x01(\t\x12`\n\nparameters\x18\x06 \x03(\x0b\x32L.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest.ParametersEntry\x1a\x31\n\x0fParametersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x8c\x01\n(InstantiateInlineWorkflowTemplateRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12<\n\x08template\x18\x02 \x01(\x0b\x32*.google.cloud.dataproc.v1.WorkflowTemplate\x12\x12\n\nrequest_id\x18\x03 \x01(\t"]\n\x1dUpdateWorkflowTemplateRequest\x12<\n\x08template\x18\x01 \x01(\x0b\x32*.google.cloud.dataproc.v1.WorkflowTemplate"U\n\x1cListWorkflowTemplatesRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"w\n\x1dListWorkflowTemplatesResponse\x12=\n\ttemplates\x18\x01 \x03(\x0b\x32*.google.cloud.dataproc.v1.WorkflowTemplate\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t">\n\x1d\x44\x65leteWorkflowTemplateRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 
\x01(\x05\x32\xe2\x0e\n\x17WorkflowTemplateService\x12\x89\x02\n\x16\x43reateWorkflowTemplate\x12\x37.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest\x1a*.google.cloud.dataproc.v1.WorkflowTemplate"\x89\x01\x82\xd3\xe4\x93\x02\x82\x01"5/v1/{parent=projects/*/locations/*}/workflowTemplates:\x08templateZ?"3/v1/{parent=projects/*/regions/*}/workflowTemplates:\x08template\x12\xed\x01\n\x13GetWorkflowTemplate\x12\x34.google.cloud.dataproc.v1.GetWorkflowTemplateRequest\x1a*.google.cloud.dataproc.v1.WorkflowTemplate"t\x82\xd3\xe4\x93\x02n\x12\x35/v1/{name=projects/*/locations/*/workflowTemplates/*}Z5\x12\x33/v1/{name=projects/*/regions/*/workflowTemplates/*}\x12\x90\x02\n\x1bInstantiateWorkflowTemplate\x12<.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest\x1a\x1d.google.longrunning.Operation"\x93\x01\x82\xd3\xe4\x93\x02\x8c\x01"A/v1/{name=projects/*/locations/*/workflowTemplates/*}:instantiate:\x01*ZD"?/v1/{name=projects/*/regions/*/workflowTemplates/*}:instantiate:\x01*\x12\xb6\x02\n!InstantiateInlineWorkflowTemplate\x12\x42.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest\x1a\x1d.google.longrunning.Operation"\xad\x01\x82\xd3\xe4\x93\x02\xa6\x01"G/v1/{parent=projects/*/locations/*}/workflowTemplates:instantiateInline:\x08templateZQ"E/v1/{parent=projects/*/regions/*}/workflowTemplates:instantiateInline:\x08template\x12\x9b\x02\n\x16UpdateWorkflowTemplate\x12\x37.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest\x1a*.google.cloud.dataproc.v1.WorkflowTemplate"\x9b\x01\x82\xd3\xe4\x93\x02\x94\x01\x1a>/v1/{template.name=projects/*/locations/*/workflowTemplates/*}:\x08templateZH\x1a\n\rspark_sql_job\x18\x07 \x01(\x0b\x32%.google.cloud.dataproc.v1.SparkSqlJobH\x00\x12\x45\n\x06labels\x18\x08 \x03(\x0b\x32\x30.google.cloud.dataproc.v1.OrderedJob.LabelsEntryB\x03\xe0\x41\x01\x12@\n\nscheduling\x18\t \x01(\x0b\x32\'.google.cloud.dataproc.v1.JobSchedulingB\x03\xe0\x41\x01\x12"\n\x15prerequisite_step_ids\x18\n 
\x03(\tB\x03\xe0\x41\x01\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\n\n\x08job_type"\x9d\x01\n\x11TemplateParameter\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06\x66ields\x18\x02 \x03(\tB\x03\xe0\x41\x02\x12\x18\n\x0b\x64\x65scription\x18\x03 \x01(\tB\x03\xe0\x41\x01\x12\x46\n\nvalidation\x18\x04 \x01(\x0b\x32-.google.cloud.dataproc.v1.ParameterValidationB\x03\xe0\x41\x01"\xa1\x01\n\x13ParameterValidation\x12:\n\x05regex\x18\x01 \x01(\x0b\x32).google.cloud.dataproc.v1.RegexValidationH\x00\x12;\n\x06values\x18\x02 \x01(\x0b\x32).google.cloud.dataproc.v1.ValueValidationH\x00\x42\x11\n\x0fvalidation_type"\'\n\x0fRegexValidation\x12\x14\n\x07regexes\x18\x01 \x03(\tB\x03\xe0\x41\x02"&\n\x0fValueValidation\x12\x13\n\x06values\x18\x01 \x03(\tB\x03\xe0\x41\x02"\xaf\x05\n\x10WorkflowMetadata\x12\x15\n\x08template\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12\x14\n\x07version\x18\x02 \x01(\x05\x42\x03\xe0\x41\x03\x12G\n\x0e\x63reate_cluster\x18\x03 \x01(\x0b\x32*.google.cloud.dataproc.v1.ClusterOperationB\x03\xe0\x41\x03\x12;\n\x05graph\x18\x04 \x01(\x0b\x32\'.google.cloud.dataproc.v1.WorkflowGraphB\x03\xe0\x41\x03\x12G\n\x0e\x64\x65lete_cluster\x18\x05 \x01(\x0b\x32*.google.cloud.dataproc.v1.ClusterOperationB\x03\xe0\x41\x03\x12\x44\n\x05state\x18\x06 \x01(\x0e\x32\x30.google.cloud.dataproc.v1.WorkflowMetadata.StateB\x03\xe0\x41\x03\x12\x19\n\x0c\x63luster_name\x18\x07 \x01(\tB\x03\xe0\x41\x03\x12N\n\nparameters\x18\x08 \x03(\x0b\x32:.google.cloud.dataproc.v1.WorkflowMetadata.ParametersEntry\x12\x33\n\nstart_time\x18\t \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x31\n\x08\x65nd_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x19\n\x0c\x63luster_uuid\x18\x0b \x01(\tB\x03\xe0\x41\x03\x1a\x31\n\x0fParametersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01"8\n\x05State\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0b\n\x07PENDING\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\x12\x08\n\x04\x44ONE\x10\x03"T\n\x10\x43lusterOperation\x12\x19\n\x0coperation_id\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12\x12\n\x05\x65rror\x18\x02 \x01(\tB\x03\xe0\x41\x03\x12\x11\n\x04\x64one\x18\x03 \x01(\x08\x42\x03\xe0\x41\x03"K\n\rWorkflowGraph\x12:\n\x05nodes\x18\x01 \x03(\x0b\x32&.google.cloud.dataproc.v1.WorkflowNodeB\x03\xe0\x41\x03"\xa3\x02\n\x0cWorkflowNode\x12\x14\n\x07step_id\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12"\n\x15prerequisite_step_ids\x18\x02 \x03(\tB\x03\xe0\x41\x03\x12\x13\n\x06job_id\x18\x03 \x01(\tB\x03\xe0\x41\x03\x12\x44\n\x05state\x18\x05 \x01(\x0e\x32\x30.google.cloud.dataproc.v1.WorkflowNode.NodeStateB\x03\xe0\x41\x03\x12\x12\n\x05\x65rror\x18\x06 \x01(\tB\x03\xe0\x41\x03"j\n\tNodeState\x12\x1a\n\x16NODE_STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07\x42LOCKED\x10\x01\x12\x0c\n\x08RUNNABLE\x10\x02\x12\x0b\n\x07RUNNING\x10\x03\x12\r\n\tCOMPLETED\x10\x04\x12\n\n\x06\x46\x41ILED\x10\x05"\xa4\x01\n\x1d\x43reateWorkflowTemplateRequest\x12@\n\x06parent\x18\x01 \x01(\tB0\xe0\x41\x02\xfa\x41*\x12(dataproc.googleapis.com/WorkflowTemplate\x12\x41\n\x08template\x18\x02 \x01(\x0b\x32*.google.cloud.dataproc.v1.WorkflowTemplateB\x03\xe0\x41\x02"r\n\x1aGetWorkflowTemplateRequest\x12>\n\x04name\x18\x01 \x01(\tB0\xe0\x41\x02\xfa\x41*\n(dataproc.googleapis.com/WorkflowTemplate\x12\x14\n\x07version\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01"\xad\x02\n"InstantiateWorkflowTemplateRequest\x12>\n\x04name\x18\x01 \x01(\tB0\xe0\x41\x02\xfa\x41*\n(dataproc.googleapis.com/WorkflowTemplate\x12\x14\n\x07version\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01\x12\x17\n\nrequest_id\x18\x05 \x01(\tB\x03\xe0\x41\x01\x12\x65\n\nparameters\x18\x06 \x03(\x0b\x32L.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest.ParametersEntryB\x03\xe0\x41\x01\x1a\x31\n\x0fParametersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01"\xc8\x01\n(InstantiateInlineWorkflowTemplateRequest\x12@\n\x06parent\x18\x01 \x01(\tB0\xe0\x41\x02\xfa\x41*\x12(dataproc.googleapis.com/WorkflowTemplate\x12\x41\n\x08template\x18\x02 \x01(\x0b\x32*.google.cloud.dataproc.v1.WorkflowTemplateB\x03\xe0\x41\x02\x12\x17\n\nrequest_id\x18\x03 \x01(\tB\x03\xe0\x41\x01"\x8f\x01\n\x1dUpdateWorkflowTemplateRequest\x12n\n\x08template\x18\x01 \x01(\x0b\x32*.google.cloud.dataproc.v1.WorkflowTemplateB0\xe0\x41\x02\xfa\x41*\n(dataproc.googleapis.com/WorkflowTemplate"\x91\x01\n\x1cListWorkflowTemplatesRequest\x12@\n\x06parent\x18\x01 \x01(\tB0\xe0\x41\x02\xfa\x41*\x12(dataproc.googleapis.com/WorkflowTemplate\x12\x16\n\tpage_size\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01\x12\x17\n\npage_token\x18\x03 \x01(\tB\x03\xe0\x41\x01"\x81\x01\n\x1dListWorkflowTemplatesResponse\x12\x42\n\ttemplates\x18\x01 \x03(\x0b\x32*.google.cloud.dataproc.v1.WorkflowTemplateB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03"u\n\x1d\x44\x65leteWorkflowTemplateRequest\x12>\n\x04name\x18\x01 \x01(\tB0\xe0\x41\x02\xfa\x41*\n(dataproc.googleapis.com/WorkflowTemplate\x12\x14\n\x07version\x18\x02 
\x01(\x05\x42\x03\xe0\x41\x01\x32\xe6\x10\n\x17WorkflowTemplateService\x12\x9b\x02\n\x16\x43reateWorkflowTemplate\x12\x37.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest\x1a*.google.cloud.dataproc.v1.WorkflowTemplate"\x9b\x01\x82\xd3\xe4\x93\x02\x82\x01"5/v1/{parent=projects/*/locations/*}/workflowTemplates:\x08templateZ?"3/v1/{parent=projects/*/regions/*}/workflowTemplates:\x08template\xda\x41\x0fparent,template\x12\xf4\x01\n\x13GetWorkflowTemplate\x12\x34.google.cloud.dataproc.v1.GetWorkflowTemplateRequest\x1a*.google.cloud.dataproc.v1.WorkflowTemplate"{\x82\xd3\xe4\x93\x02n\x12\x35/v1/{name=projects/*/locations/*/workflowTemplates/*}Z5\x12\x33/v1/{name=projects/*/regions/*/workflowTemplates/*}\xda\x41\x04name\x12\xd5\x02\n\x1bInstantiateWorkflowTemplate\x12<.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest\x1a\x1d.google.longrunning.Operation"\xd8\x01\x82\xd3\xe4\x93\x02\x8c\x01"A/v1/{name=projects/*/locations/*/workflowTemplates/*}:instantiate:\x01*ZD"?/v1/{name=projects/*/regions/*/workflowTemplates/*}:instantiate:\x01*\xda\x41\x04name\xda\x41\x0fname,parameters\xca\x41)\n\x15google.protobuf.Empty\x12\x10WorkflowMetadata\x12\xf4\x02\n!InstantiateInlineWorkflowTemplate\x12\x42.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest\x1a\x1d.google.longrunning.Operation"\xeb\x01\x82\xd3\xe4\x93\x02\xa6\x01"G/v1/{parent=projects/*/locations/*}/workflowTemplates:instantiateInline:\x08templateZQ"E/v1/{parent=projects/*/regions/*}/workflowTemplates:instantiateInline:\x08template\xda\x41\x0fparent,template\xca\x41)\n\x15google.protobuf.Empty\x12\x10WorkflowMetadata\x12\xa6\x02\n\x16UpdateWorkflowTemplate\x12\x37.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest\x1a*.google.cloud.dataproc.v1.WorkflowTemplate"\xa6\x01\x82\xd3\xe4\x93\x02\x94\x01\x1a>/v1/{template.name=projects/*/locations/*/workflowTemplates/*}:\x08templateZH\x1a/v1/{template.name=projects/*/locations/*/workflowTemplates/*}:\010templateZH\032/v1/{template.name=projects/*/
locations/*/workflowTemplates/*}:\010templateZH\032>> response = client.create_autoscaling_policy(parent, policy) Args: - parent (str): Required. The "resource name" of the region, as described in - https://cloud.google.com/apis/design/resource\_names of the form - ``projects/{project_id}/regions/{region}``. - policy (Union[dict, ~google.cloud.dataproc_v1beta2.types.AutoscalingPolicy]): The autoscaling policy to create. + parent (str): Required. The "resource name" of the region or location, as described in + https://cloud.google.com/apis/design/resource\_names. + + - For ``projects.regions.autoscalingPolicies.create``, the resource + name has the following format: + ``projects/{project_id}/regions/{region}`` + + - For ``projects.locations.autoscalingPolicies.create``, the resource + name has the following format: + ``projects/{project_id}/locations/{location}`` + policy (Union[dict, ~google.cloud.dataproc_v1beta2.types.AutoscalingPolicy]): Required. The autoscaling policy to create. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.dataproc_v1beta2.types.AutoscalingPolicy` @@ -385,8 +392,15 @@ def get_autoscaling_policy( Args: name (str): Required. The "resource name" of the autoscaling policy, as described in - https://cloud.google.com/apis/design/resource\_names of the form - ``projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}``. + https://cloud.google.com/apis/design/resource\_names. + + - For ``projects.regions.autoscalingPolicies.get``, the resource name + of the policy has the following format: + ``projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}`` + + - For ``projects.locations.autoscalingPolicies.get``, the resource name + of the policy has the following format: + ``projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}`` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. 
If ``None`` is specified, requests will be retried using a default configuration. @@ -468,9 +482,16 @@ def list_autoscaling_policies( ... pass Args: - parent (str): Required. The "resource name" of the region, as described in - https://cloud.google.com/apis/design/resource\_names of the form - ``projects/{project_id}/regions/{region}`` + parent (str): Required. The "resource name" of the region or location, as described in + https://cloud.google.com/apis/design/resource\_names. + + - For ``projects.regions.autoscalingPolicies.list``, the resource name + of the region has the following format: + ``projects/{project_id}/regions/{region}`` + + - For ``projects.locations.autoscalingPolicies.list``, the resource + name of the location has the following format: + ``projects/{project_id}/locations/{location}`` page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per- resource, this parameter does not affect the return value. If page @@ -562,8 +583,15 @@ def delete_autoscaling_policy( Args: name (str): Required. The "resource name" of the autoscaling policy, as described in - https://cloud.google.com/apis/design/resource\_names of the form - ``projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}``. + https://cloud.google.com/apis/design/resource\_names. + + - For ``projects.regions.autoscalingPolicies.delete``, the resource + name of the policy has the following format: + ``projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}`` + + - For ``projects.locations.autoscalingPolicies.delete``, the resource + name of the policy has the following format: + ``projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}`` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. 
diff --git a/dataproc/google/cloud/dataproc_v1beta2/gapic/cluster_controller_client.py b/dataproc/google/cloud/dataproc_v1beta2/gapic/cluster_controller_client.py index 1c422994b9f5..246638c9473b 100644 --- a/dataproc/google/cloud/dataproc_v1beta2/gapic/cluster_controller_client.py +++ b/dataproc/google/cloud/dataproc_v1beta2/gapic/cluster_controller_client.py @@ -207,7 +207,9 @@ def create_cluster( metadata=None, ): """ - Creates a cluster in a project. + Creates a cluster in a project. The returned ``Operation.metadata`` will + be + `ClusterOperationMetadata `__. Example: >>> from google.cloud import dataproc_v1beta2 @@ -310,7 +312,9 @@ def update_cluster( metadata=None, ): """ - Updates a cluster in a project. + Updates a cluster in a project. The returned ``Operation.metadata`` will + be + `ClusterOperationMetadata `__. Example: >>> from google.cloud import dataproc_v1beta2 @@ -505,7 +509,9 @@ def delete_cluster( metadata=None, ): """ - Deletes a cluster in a project. + Deletes a cluster in a project. The returned ``Operation.metadata`` will + be + `ClusterOperationMetadata `__. Example: >>> from google.cloud import dataproc_v1beta2 @@ -795,8 +801,11 @@ def diagnose_cluster( metadata=None, ): """ - Gets cluster diagnostic information. After the operation completes, the - Operation.response field contains ``DiagnoseClusterOutputLocation``. + Gets cluster diagnostic information. The returned ``Operation.metadata`` + will be + `ClusterOperationMetadata `__. + After the operation completes, ``Operation.response`` contains + `Empty `__. 
Example: >>> from google.cloud import dataproc_v1beta2 diff --git a/dataproc/google/cloud/dataproc_v1beta2/gapic/transports/cluster_controller_grpc_transport.py b/dataproc/google/cloud/dataproc_v1beta2/gapic/transports/cluster_controller_grpc_transport.py index 72cee7d4b47b..b708113b7879 100644 --- a/dataproc/google/cloud/dataproc_v1beta2/gapic/transports/cluster_controller_grpc_transport.py +++ b/dataproc/google/cloud/dataproc_v1beta2/gapic/transports/cluster_controller_grpc_transport.py @@ -119,7 +119,9 @@ def channel(self): def create_cluster(self): """Return the gRPC stub for :meth:`ClusterControllerClient.create_cluster`. - Creates a cluster in a project. + Creates a cluster in a project. The returned ``Operation.metadata`` will + be + `ClusterOperationMetadata `__. Returns: Callable: A callable which accepts the appropriate @@ -132,7 +134,9 @@ def create_cluster(self): def update_cluster(self): """Return the gRPC stub for :meth:`ClusterControllerClient.update_cluster`. - Updates a cluster in a project. + Updates a cluster in a project. The returned ``Operation.metadata`` will + be + `ClusterOperationMetadata `__. Returns: Callable: A callable which accepts the appropriate @@ -145,7 +149,9 @@ def update_cluster(self): def delete_cluster(self): """Return the gRPC stub for :meth:`ClusterControllerClient.delete_cluster`. - Deletes a cluster in a project. + Deletes a cluster in a project. The returned ``Operation.metadata`` will + be + `ClusterOperationMetadata `__. Returns: Callable: A callable which accepts the appropriate @@ -184,8 +190,11 @@ def list_clusters(self): def diagnose_cluster(self): """Return the gRPC stub for :meth:`ClusterControllerClient.diagnose_cluster`. - Gets cluster diagnostic information. After the operation completes, the - Operation.response field contains ``DiagnoseClusterOutputLocation``. + Gets cluster diagnostic information. The returned ``Operation.metadata`` + will be + `ClusterOperationMetadata `__. 
+ After the operation completes, ``Operation.response`` contains + `Empty `__. Returns: Callable: A callable which accepts the appropriate diff --git a/dataproc/google/cloud/dataproc_v1beta2/gapic/transports/workflow_template_service_grpc_transport.py b/dataproc/google/cloud/dataproc_v1beta2/gapic/transports/workflow_template_service_grpc_transport.py index 86bc98b33810..14398811aedf 100644 --- a/dataproc/google/cloud/dataproc_v1beta2/gapic/transports/workflow_template_service_grpc_transport.py +++ b/dataproc/google/cloud/dataproc_v1beta2/gapic/transports/workflow_template_service_grpc_transport.py @@ -160,7 +160,10 @@ def instantiate_workflow_template(self): cause any inflight jobs to be cancelled and workflow-owned clusters to be deleted. - The ``Operation.metadata`` will be ``WorkflowMetadata``. + The ``Operation.metadata`` will be + `WorkflowMetadata `__. + Also see `Using + WorkflowMetadata `__. On successful completion, ``Operation.response`` will be ``Empty``. @@ -189,7 +192,10 @@ def instantiate_inline_workflow_template(self): cause any inflight jobs to be cancelled and workflow-owned clusters to be deleted. - The ``Operation.metadata`` will be ``WorkflowMetadata``. + The ``Operation.metadata`` will be + `WorkflowMetadata `__. + Also see `Using + WorkflowMetadata `__. On successful completion, ``Operation.response`` will be ``Empty``. diff --git a/dataproc/google/cloud/dataproc_v1beta2/gapic/workflow_template_service_client.py b/dataproc/google/cloud/dataproc_v1beta2/gapic/workflow_template_service_client.py index f7ad32daf927..9e67cd3f6e35 100644 --- a/dataproc/google/cloud/dataproc_v1beta2/gapic/workflow_template_service_client.py +++ b/dataproc/google/cloud/dataproc_v1beta2/gapic/workflow_template_service_client.py @@ -243,9 +243,16 @@ def create_workflow_template( >>> response = client.create_workflow_template(parent, template) Args: - parent (str): Required. 
The "resource name" of the region, as described in - https://cloud.google.com/apis/design/resource\_names of the form - ``projects/{project_id}/regions/{region}`` + parent (str): Required. The resource name of the region or location, as described in + https://cloud.google.com/apis/design/resource\_names. + + - For ``projects.regions.workflowTemplates,create``, the resource name + of the region has the following format: + ``projects/{project_id}/regions/{region}`` + + - For ``projects.locations.workflowTemplates.create``, the resource + name of the location has the following format: + ``projects/{project_id}/locations/{location}`` template (Union[dict, ~google.cloud.dataproc_v1beta2.types.WorkflowTemplate]): Required. The Dataproc workflow template to create. If a dict is provided, it must be of the same form as the protobuf @@ -324,11 +331,18 @@ def get_workflow_template( >>> response = client.get_workflow_template(name) Args: - name (str): Required. The "resource name" of the workflow template, as described in - https://cloud.google.com/apis/design/resource\_names of the form - ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`` + name (str): Required. The resource name of the workflow template, as described in + https://cloud.google.com/apis/design/resource\_names. + + - For ``projects.regions.workflowTemplates.get``, the resource name of + the template has the following format: + ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`` + + - For ``projects.locations.workflowTemplates.get``, the resource name + of the template has the following format: + ``projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`` version (int): Optional. The version of workflow template to retrieve. Only previously - instatiated versions can be retrieved. + instantiated versions can be retrieved. If unspecified, retrieves the current version. 
retry (Optional[google.api_core.retry.Retry]): A retry object used @@ -403,7 +417,10 @@ def instantiate_workflow_template( cause any inflight jobs to be cancelled and workflow-owned clusters to be deleted. - The ``Operation.metadata`` will be ``WorkflowMetadata``. + The ``Operation.metadata`` will be + `WorkflowMetadata `__. + Also see `Using + WorkflowMetadata `__. On successful completion, ``Operation.response`` will be ``Empty``. @@ -426,9 +443,16 @@ def instantiate_workflow_template( >>> metadata = response.metadata() Args: - name (str): Required. The "resource name" of the workflow template, as described in - https://cloud.google.com/apis/design/resource\_names of the form - ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`` + name (str): Required. The resource name of the workflow template, as described in + https://cloud.google.com/apis/design/resource\_names. + + - For ``projects.regions.workflowTemplates.instantiate``, the resource + name of the template has the following format: + ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`` + + - For ``projects.locations.workflowTemplates.instantiate``, the + resource name of the template has the following format: + ``projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`` version (int): Optional. The version of workflow template to instantiate. If specified, the workflow will be instantiated only if the current version of the workflow template has the supplied version. @@ -534,7 +558,10 @@ def instantiate_inline_workflow_template( cause any inflight jobs to be cancelled and workflow-owned clusters to be deleted. - The ``Operation.metadata`` will be ``WorkflowMetadata``. + The ``Operation.metadata`` will be + `WorkflowMetadata `__. + Also see `Using + WorkflowMetadata `__. On successful completion, ``Operation.response`` will be ``Empty``. 
@@ -560,9 +587,16 @@ def instantiate_inline_workflow_template( >>> metadata = response.metadata() Args: - parent (str): Required. The "resource name" of the workflow template region, as - described in https://cloud.google.com/apis/design/resource\_names of the - form ``projects/{project_id}/regions/{region}`` + parent (str): Required. The resource name of the region or location, as described in + https://cloud.google.com/apis/design/resource\_names. + + - For ``projects.regions.workflowTemplates,instantiateinline``, the + resource name of the region has the following format: + ``projects/{project_id}/regions/{region}`` + + - For ``projects.locations.workflowTemplates.instantiateinline``, the + resource name of the location has the following format: + ``projects/{project_id}/locations/{location}`` template (Union[dict, ~google.cloud.dataproc_v1beta2.types.WorkflowTemplate]): Required. The workflow template to instantiate. If a dict is provided, it must be of the same form as the protobuf @@ -751,9 +785,16 @@ def list_workflow_templates( ... pass Args: - parent (str): Required. The "resource name" of the region, as described in - https://cloud.google.com/apis/design/resource\_names of the form - ``projects/{project_id}/regions/{region}`` + parent (str): Required. The resource name of the region or location, as described in + https://cloud.google.com/apis/design/resource\_names. + + - For ``projects.regions.workflowTemplates,list``, the resource name of + the region has the following format: + ``projects/{project_id}/regions/{region}`` + + - For ``projects.locations.workflowTemplates.list``, the resource name + of the location has the following format: + ``projects/{project_id}/locations/{location}`` page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per- resource, this parameter does not affect the return value. 
If page @@ -844,9 +885,16 @@ def delete_workflow_template( >>> client.delete_workflow_template(name) Args: - name (str): Required. The "resource name" of the workflow template, as described in - https://cloud.google.com/apis/design/resource\_names of the form - ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`` + name (str): Required. The resource name of the workflow template, as described in + https://cloud.google.com/apis/design/resource\_names. + + - For ``projects.regions.workflowTemplates.delete``, the resource name + of the template has the following format: + ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`` + + - For ``projects.locations.workflowTemplates.instantiate``, the + resource name of the template has the following format: + ``projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`` version (int): Optional. The version of workflow template to delete. If specified, will only delete the template if the current server version matches specified version. 
diff --git a/dataproc/google/cloud/dataproc_v1beta2/proto/autoscaling_policies.proto b/dataproc/google/cloud/dataproc_v1beta2/proto/autoscaling_policies.proto index 0c3efbd067ae..36d507c82638 100644 --- a/dataproc/google/cloud/dataproc_v1beta2/proto/autoscaling_policies.proto +++ b/dataproc/google/cloud/dataproc_v1beta2/proto/autoscaling_policies.proto @@ -18,12 +18,11 @@ syntax = "proto3"; package google.cloud.dataproc.v1beta2; import "google/api/annotations.proto"; -import "google/cloud/dataproc/v1beta2/clusters.proto"; -import "google/cloud/dataproc/v1beta2/jobs.proto"; -import "google/longrunning/operations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; import "google/protobuf/duration.proto"; import "google/protobuf/empty.proto"; -import "google/protobuf/timestamp.proto"; option go_package = "google.golang.org/genproto/googleapis/cloud/dataproc/v1beta2;dataproc"; option java_multiple_files = true; @@ -33,6 +32,9 @@ option java_package = "com.google.cloud.dataproc.v1beta2"; // The API interface for managing autoscaling policies in the // Google Cloud Dataproc API. service AutoscalingPolicyService { + option (google.api.default_host) = "dataproc.googleapis.com"; + option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; + // Creates new autoscaling policy. rpc CreateAutoscalingPolicy(CreateAutoscalingPolicyRequest) returns (AutoscalingPolicy) { option (google.api.http) = { @@ -43,6 +45,7 @@ service AutoscalingPolicyService { body: "policy" } }; + option (google.api.method_signature) = "parent,policy"; } // Updates (replaces) autoscaling policy. @@ -58,6 +61,7 @@ service AutoscalingPolicyService { body: "policy" } }; + option (google.api.method_signature) = "policy"; } // Retrieves autoscaling policy. 
@@ -68,6 +72,7 @@ service AutoscalingPolicyService { get: "/v1beta2/{name=projects/*/regions/*/autoscalingPolicies/*}" } }; + option (google.api.method_signature) = "name"; } // Lists autoscaling policies in the project. @@ -78,6 +83,7 @@ service AutoscalingPolicyService { get: "/v1beta2/{parent=projects/*/regions/*}/autoscalingPolicies" } }; + option (google.api.method_signature) = "parent"; } // Deletes an autoscaling policy. It is an error to delete an autoscaling @@ -89,22 +95,37 @@ service AutoscalingPolicyService { delete: "/v1beta2/{name=projects/*/regions/*/autoscalingPolicies/*}" } }; + option (google.api.method_signature) = "name"; } } // Describes an autoscaling policy for Dataproc cluster autoscaler. message AutoscalingPolicy { + option (google.api.resource) = { + type: "dataproc.googleapis.com/AutoscalingPolicy" + pattern: "projects/{project}/regions/{region}/autoscalingPolicies/{autoscaling_policy}" + pattern: "projects/{project}/locations/{location}/autoscalingPolicies/{autoscaling_policy}" + history: ORIGINALLY_SINGLE_PATTERN + }; + // Required. The policy id. // // The id must contain only letters (a-z, A-Z), numbers (0-9), // underscores (_), and hyphens (-). Cannot begin or end with underscore // or hyphen. Must consist of between 3 and 50 characters. - string id = 1; + string id = 1 [(google.api.field_behavior) = REQUIRED]; - // Output only. The "resource name" of the policy, as described - // in https://cloud.google.com/apis/design/resource_names of the form - // `projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}`. - string name = 2; + // Output only. The "resource name" of the autoscaling policy, as described + // in https://cloud.google.com/apis/design/resource_names. 
+ // + // * For `projects.regions.autoscalingPolicies`, the resource name of the + // policy has the following format: + // `projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}` + // + // * For `projects.locations.autoscalingPolicies`, the resource name of the + // policy has the following format: + // `projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}` + string name = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; // Required. Autoscaling algorithm for policy. oneof algorithm { @@ -112,22 +133,22 @@ message AutoscalingPolicy { } // Required. Describes how the autoscaler will operate for primary workers. - InstanceGroupAutoscalingPolicyConfig worker_config = 4; + InstanceGroupAutoscalingPolicyConfig worker_config = 4 [(google.api.field_behavior) = REQUIRED]; // Optional. Describes how the autoscaler will operate for secondary workers. - InstanceGroupAutoscalingPolicyConfig secondary_worker_config = 5; + InstanceGroupAutoscalingPolicyConfig secondary_worker_config = 5 [(google.api.field_behavior) = OPTIONAL]; } // Basic algorithm for autoscaling. message BasicAutoscalingAlgorithm { // Required. YARN autoscaling configuration. - BasicYarnAutoscalingConfig yarn_config = 1; + BasicYarnAutoscalingConfig yarn_config = 1 [(google.api.field_behavior) = REQUIRED]; // Optional. Duration between scaling events. A scaling period starts after // the update operation from the previous event has completed. // // Bounds: [2m, 1d]. Default: 2m. - google.protobuf.Duration cooldown_period = 2; + google.protobuf.Duration cooldown_period = 2 [(google.api.field_behavior) = OPTIONAL]; } // Basic autoscaling configurations for YARN. @@ -138,7 +159,7 @@ message BasicYarnAutoscalingConfig { // downscaling operations. // // Bounds: [0s, 1d]. - google.protobuf.Duration graceful_decommission_timeout = 5; + google.protobuf.Duration graceful_decommission_timeout = 5 [(google.api.field_behavior) = REQUIRED]; // Required. 
Fraction of average pending memory in the last cooldown period // for which to add workers. A scale-up factor of 1.0 will result in scaling @@ -147,7 +168,7 @@ message BasicYarnAutoscalingConfig { // magnitude of scaling up (less aggressive scaling). // // Bounds: [0.0, 1.0]. - double scale_up_factor = 1; + double scale_up_factor = 1 [(google.api.field_behavior) = REQUIRED]; // Required. Fraction of average pending memory in the last cooldown period // for which to remove workers. A scale-down factor of 1 will result in @@ -156,7 +177,7 @@ message BasicYarnAutoscalingConfig { // removing workers, which can be beneficial for autoscaling a single job. // // Bounds: [0.0, 1.0]. - double scale_down_factor = 2; + double scale_down_factor = 2 [(google.api.field_behavior) = REQUIRED]; // Optional. Minimum scale-up threshold as a fraction of total cluster size // before scaling occurs. For example, in a 20-worker cluster, a threshold of @@ -165,7 +186,7 @@ message BasicYarnAutoscalingConfig { // on any recommended change. // // Bounds: [0.0, 1.0]. Default: 0.0. - double scale_up_min_worker_fraction = 3; + double scale_up_min_worker_fraction = 3 [(google.api.field_behavior) = OPTIONAL]; // Optional. Minimum scale-down threshold as a fraction of total cluster size // before scaling occurs. For example, in a 20-worker cluster, a threshold of @@ -174,7 +195,7 @@ message BasicYarnAutoscalingConfig { // on any recommended change. // // Bounds: [0.0, 1.0]. Default: 0.0. - double scale_down_min_worker_fraction = 4; + double scale_down_min_worker_fraction = 4 [(google.api.field_behavior) = OPTIONAL]; } // Configuration for the size bounds of an instance group, including its @@ -184,7 +205,7 @@ message InstanceGroupAutoscalingPolicyConfig { // // Primary workers - Bounds: [2, max_instances]. Default: 2. // Secondary workers - Bounds: [0, max_instances]. Default: 0. - int32 min_instances = 1; + int32 min_instances = 1 [(google.api.field_behavior) = OPTIONAL]; // Optional. 
Maximum number of instances for this group. Required for primary // workers. Note that by default, clusters will not use secondary workers. @@ -192,7 +213,7 @@ message InstanceGroupAutoscalingPolicyConfig { // // Primary workers - Bounds: [min_instances, ). Required. // Secondary workers - Bounds: [min_instances, ). Default: 0. - int32 max_instances = 2; + int32 max_instances = 2 [(google.api.field_behavior) = OPTIONAL]; // Optional. Weight for the instance group, which is used to determine the // fraction of total workers in the cluster from this instance group. @@ -212,32 +233,61 @@ message InstanceGroupAutoscalingPolicyConfig { // zero weight on the unset group. For example if weight is set only on // primary workers, the cluster will use primary workers only and no // secondary workers. - int32 weight = 3; + int32 weight = 3 [(google.api.field_behavior) = OPTIONAL]; } // A request to create an autoscaling policy. message CreateAutoscalingPolicyRequest { - // Required. The "resource name" of the region, as described - // in https://cloud.google.com/apis/design/resource_names of the form - // `projects/{project_id}/regions/{region}`. - string parent = 1; - - // The autoscaling policy to create. - AutoscalingPolicy policy = 2; + // Required. The "resource name" of the region or location, as described + // in https://cloud.google.com/apis/design/resource_names. + // + // * For `projects.regions.autoscalingPolicies.create`, the resource name + // has the following format: + // `projects/{project_id}/regions/{region}` + // + // * For `projects.locations.autoscalingPolicies.create`, the resource name + // has the following format: + // `projects/{project_id}/locations/{location}` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "dataproc.googleapis.com/AutoscalingPolicy" + } + ]; + + // Required. The autoscaling policy to create. 
+ AutoscalingPolicy policy = 2 [(google.api.field_behavior) = REQUIRED]; } // A request to fetch an autoscaling policy. message GetAutoscalingPolicyRequest { // Required. The "resource name" of the autoscaling policy, as described - // in https://cloud.google.com/apis/design/resource_names of the form - // `projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}`. - string name = 1; + // in https://cloud.google.com/apis/design/resource_names. + // + // * For `projects.regions.autoscalingPolicies.get`, the resource name + // of the policy has the following format: + // `projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}` + // + // * For `projects.locations.autoscalingPolicies.get`, the resource name + // of the policy has the following format: + // `projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "dataproc.googleapis.com/AutoscalingPolicy" + } + ]; } // A request to update an autoscaling policy. message UpdateAutoscalingPolicyRequest { // Required. The updated autoscaling policy. - AutoscalingPolicy policy = 1; + AutoscalingPolicy policy = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "dataproc.googleapis.com/AutoscalingPolicy" + } + ]; } // A request to delete an autoscaling policy. @@ -245,32 +295,57 @@ message UpdateAutoscalingPolicyRequest { // Autoscaling policies in use by one or more clusters will not be deleted. message DeleteAutoscalingPolicyRequest { // Required. The "resource name" of the autoscaling policy, as described - // in https://cloud.google.com/apis/design/resource_names of the form - // `projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}`. - string name = 1; + // in https://cloud.google.com/apis/design/resource_names. 
+ // + // * For `projects.regions.autoscalingPolicies.delete`, the resource name + // of the policy has the following format: + // `projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}` + // + // * For `projects.locations.autoscalingPolicies.delete`, the resource name + // of the policy has the following format: + // `projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "dataproc.googleapis.com/AutoscalingPolicy" + } + ]; } // A request to list autoscaling policies in a project. message ListAutoscalingPoliciesRequest { - // Required. The "resource name" of the region, as described - // in https://cloud.google.com/apis/design/resource_names of the form - // `projects/{project_id}/regions/{region}` - string parent = 1; + // Required. The "resource name" of the region or location, as described + // in https://cloud.google.com/apis/design/resource_names. + // + // * For `projects.regions.autoscalingPolicies.list`, the resource name + // of the region has the following format: + // `projects/{project_id}/regions/{region}` + // + // * For `projects.locations.autoscalingPolicies.list`, the resource name + // of the location has the following format: + // `projects/{project_id}/locations/{location}` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "dataproc.googleapis.com/AutoscalingPolicy" + } + ]; // Optional. The maximum number of results to return in each response. - int32 page_size = 2; + // Must be less than or equal to 1000. Defaults to 100. + int32 page_size = 2 [(google.api.field_behavior) = OPTIONAL]; // Optional. The page token, returned by a previous call, to request the // next page of results. 
- string page_token = 3; + string page_token = 3 [(google.api.field_behavior) = OPTIONAL]; } // A response to a request to list autoscaling policies in a project. message ListAutoscalingPoliciesResponse { // Output only. Autoscaling policies list. - repeated AutoscalingPolicy policies = 1; + repeated AutoscalingPolicy policies = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. This token is included in the response if there are more // results to fetch. - string next_page_token = 2; + string next_page_token = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; } diff --git a/dataproc/google/cloud/dataproc_v1beta2/proto/autoscaling_policies_pb2.py b/dataproc/google/cloud/dataproc_v1beta2/proto/autoscaling_policies_pb2.py index 880fe573d879..50c0c54dd6fd 100644 --- a/dataproc/google/cloud/dataproc_v1beta2/proto/autoscaling_policies_pb2.py +++ b/dataproc/google/cloud/dataproc_v1beta2/proto/autoscaling_policies_pb2.py @@ -16,18 +16,11 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.cloud.dataproc_v1beta2.proto import ( - clusters_pb2 as google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_clusters__pb2, -) -from google.cloud.dataproc_v1beta2.proto import ( - jobs_pb2 as google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_jobs__pb2, -) -from google.longrunning import ( - operations_pb2 as google_dot_longrunning_dot_operations__pb2, -) +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 DESCRIPTOR = _descriptor.FileDescriptor( @@ -38,16 +31,15 @@ 
"\n!com.google.cloud.dataproc.v1beta2B\030AutoscalingPoliciesProtoP\001ZEgoogle.golang.org/genproto/googleapis/cloud/dataproc/v1beta2;dataproc" ), serialized_pb=_b( - '\n>google/cloud/dataproc_v1beta2/proto/autoscaling_policies.proto\x12\x1dgoogle.cloud.dataproc.v1beta2\x1a\x1cgoogle/api/annotations.proto\x1a\x32google/cloud/dataproc_v1beta2/proto/clusters.proto\x1a.google/cloud/dataproc_v1beta2/proto/jobs.proto\x1a#google/longrunning/operations.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xd1\x02\n\x11\x41utoscalingPolicy\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12S\n\x0f\x62\x61sic_algorithm\x18\x03 \x01(\x0b\x32\x38.google.cloud.dataproc.v1beta2.BasicAutoscalingAlgorithmH\x00\x12Z\n\rworker_config\x18\x04 \x01(\x0b\x32\x43.google.cloud.dataproc.v1beta2.InstanceGroupAutoscalingPolicyConfig\x12\x64\n\x17secondary_worker_config\x18\x05 \x01(\x0b\x32\x43.google.cloud.dataproc.v1beta2.InstanceGroupAutoscalingPolicyConfigB\x0b\n\talgorithm"\x9f\x01\n\x19\x42\x61sicAutoscalingAlgorithm\x12N\n\x0byarn_config\x18\x01 \x01(\x0b\x32\x39.google.cloud.dataproc.v1beta2.BasicYarnAutoscalingConfig\x12\x32\n\x0f\x63ooldown_period\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration"\xe0\x01\n\x1a\x42\x61sicYarnAutoscalingConfig\x12@\n\x1dgraceful_decommission_timeout\x18\x05 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x17\n\x0fscale_up_factor\x18\x01 \x01(\x01\x12\x19\n\x11scale_down_factor\x18\x02 \x01(\x01\x12$\n\x1cscale_up_min_worker_fraction\x18\x03 \x01(\x01\x12&\n\x1escale_down_min_worker_fraction\x18\x04 \x01(\x01"d\n$InstanceGroupAutoscalingPolicyConfig\x12\x15\n\rmin_instances\x18\x01 \x01(\x05\x12\x15\n\rmax_instances\x18\x02 \x01(\x05\x12\x0e\n\x06weight\x18\x03 \x01(\x05"r\n\x1e\x43reateAutoscalingPolicyRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12@\n\x06policy\x18\x02 
\x01(\x0b\x32\x30.google.cloud.dataproc.v1beta2.AutoscalingPolicy"+\n\x1bGetAutoscalingPolicyRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"b\n\x1eUpdateAutoscalingPolicyRequest\x12@\n\x06policy\x18\x01 \x01(\x0b\x32\x30.google.cloud.dataproc.v1beta2.AutoscalingPolicy".\n\x1e\x44\x65leteAutoscalingPolicyRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"W\n\x1eListAutoscalingPoliciesRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"~\n\x1fListAutoscalingPoliciesResponse\x12\x42\n\x08policies\x18\x01 \x03(\x0b\x32\x30.google.cloud.dataproc.v1beta2.AutoscalingPolicy\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t2\x92\x0b\n\x18\x41utoscalingPolicyService\x12\xa0\x02\n\x17\x43reateAutoscalingPolicy\x12=.google.cloud.dataproc.v1beta2.CreateAutoscalingPolicyRequest\x1a\x30.google.cloud.dataproc.v1beta2.AutoscalingPolicy"\x93\x01\x82\xd3\xe4\x93\x02\x8c\x01".google.cloud.dataproc.v1beta2.ListAutoscalingPoliciesResponse"\x82\x01\x82\xd3\xe4\x93\x02|\x12google/cloud/dataproc_v1beta2/proto/autoscaling_policies.proto\x12\x1dgoogle.cloud.dataproc.v1beta2\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto"\xb9\x04\n\x11\x41utoscalingPolicy\x12\x0f\n\x02id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x11\n\x04name\x18\x02 \x01(\tB\x03\xe0\x41\x03\x12S\n\x0f\x62\x61sic_algorithm\x18\x03 \x01(\x0b\x32\x38.google.cloud.dataproc.v1beta2.BasicAutoscalingAlgorithmH\x00\x12_\n\rworker_config\x18\x04 \x01(\x0b\x32\x43.google.cloud.dataproc.v1beta2.InstanceGroupAutoscalingPolicyConfigB\x03\xe0\x41\x02\x12i\n\x17secondary_worker_config\x18\x05 
\x01(\x0b\x32\x43.google.cloud.dataproc.v1beta2.InstanceGroupAutoscalingPolicyConfigB\x03\xe0\x41\x01:\xd1\x01\xea\x41\xcd\x01\n)dataproc.googleapis.com/AutoscalingPolicy\x12Lprojects/{project}/regions/{region}/autoscalingPolicies/{autoscaling_policy}\x12Pprojects/{project}/locations/{location}/autoscalingPolicies/{autoscaling_policy} \x01\x42\x0b\n\talgorithm"\xa9\x01\n\x19\x42\x61sicAutoscalingAlgorithm\x12S\n\x0byarn_config\x18\x01 \x01(\x0b\x32\x39.google.cloud.dataproc.v1beta2.BasicYarnAutoscalingConfigB\x03\xe0\x41\x02\x12\x37\n\x0f\x63ooldown_period\x18\x02 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x01"\xf9\x01\n\x1a\x42\x61sicYarnAutoscalingConfig\x12\x45\n\x1dgraceful_decommission_timeout\x18\x05 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x02\x12\x1c\n\x0fscale_up_factor\x18\x01 \x01(\x01\x42\x03\xe0\x41\x02\x12\x1e\n\x11scale_down_factor\x18\x02 \x01(\x01\x42\x03\xe0\x41\x02\x12)\n\x1cscale_up_min_worker_fraction\x18\x03 \x01(\x01\x42\x03\xe0\x41\x01\x12+\n\x1escale_down_min_worker_fraction\x18\x04 \x01(\x01\x42\x03\xe0\x41\x01"s\n$InstanceGroupAutoscalingPolicyConfig\x12\x1a\n\rmin_instances\x18\x01 \x01(\x05\x42\x03\xe0\x41\x01\x12\x1a\n\rmax_instances\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01\x12\x13\n\x06weight\x18\x03 \x01(\x05\x42\x03\xe0\x41\x01"\xaa\x01\n\x1e\x43reateAutoscalingPolicyRequest\x12\x41\n\x06parent\x18\x01 \x01(\tB1\xe0\x41\x02\xfa\x41+\x12)dataproc.googleapis.com/AutoscalingPolicy\x12\x45\n\x06policy\x18\x02 \x01(\x0b\x32\x30.google.cloud.dataproc.v1beta2.AutoscalingPolicyB\x03\xe0\x41\x02"^\n\x1bGetAutoscalingPolicyRequest\x12?\n\x04name\x18\x01 \x01(\tB1\xe0\x41\x02\xfa\x41+\n)dataproc.googleapis.com/AutoscalingPolicy"\x95\x01\n\x1eUpdateAutoscalingPolicyRequest\x12s\n\x06policy\x18\x01 \x01(\x0b\x32\x30.google.cloud.dataproc.v1beta2.AutoscalingPolicyB1\xe0\x41\x02\xfa\x41+\n)dataproc.googleapis.com/AutoscalingPolicy"a\n\x1e\x44\x65leteAutoscalingPolicyRequest\x12?\n\x04name\x18\x01 
\x01(\tB1\xe0\x41\x02\xfa\x41+\n)dataproc.googleapis.com/AutoscalingPolicy"\x94\x01\n\x1eListAutoscalingPoliciesRequest\x12\x41\n\x06parent\x18\x01 \x01(\tB1\xe0\x41\x02\xfa\x41+\x12)dataproc.googleapis.com/AutoscalingPolicy\x12\x16\n\tpage_size\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01\x12\x17\n\npage_token\x18\x03 \x01(\tB\x03\xe0\x41\x01"\x88\x01\n\x1fListAutoscalingPoliciesResponse\x12G\n\x08policies\x18\x01 \x03(\x0b\x32\x30.google.cloud.dataproc.v1beta2.AutoscalingPolicyB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03\x32\x8f\x0c\n\x18\x41utoscalingPolicyService\x12\xb0\x02\n\x17\x43reateAutoscalingPolicy\x12=.google.cloud.dataproc.v1beta2.CreateAutoscalingPolicyRequest\x1a\x30.google.cloud.dataproc.v1beta2.AutoscalingPolicy"\xa3\x01\x82\xd3\xe4\x93\x02\x8c\x01".google.cloud.dataproc.v1beta2.ListAutoscalingPoliciesResponse"\x8b\x01\x82\xd3\xe4\x93\x02|\x12 labels = 8; + map labels = 8 [(google.api.field_behavior) = OPTIONAL]; // Output only. Cluster status. - ClusterStatus status = 4; + ClusterStatus status = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The previous cluster status. - repeated ClusterStatus status_history = 7; + repeated ClusterStatus status_history = 7 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. A cluster UUID (Unique Universal Identifier). Cloud Dataproc // generates this value when it creates the cluster. - string cluster_uuid = 6; + string cluster_uuid = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Contains cluster daemon metrics such as HDFS and YARN stats. // // **Beta Feature**: This report is available for testing purposes only. It // may be changed before final release. - ClusterMetrics metrics = 9; + ClusterMetrics metrics = 9 [(google.api.field_behavior) = OUTPUT_ONLY]; } // The cluster config. 
@@ -132,29 +170,29 @@ message ClusterConfig { // and manage this project-level, per-location bucket (see // [Cloud Dataproc staging // bucket](/dataproc/docs/concepts/configuring-clusters/staging-bucket)). - string config_bucket = 1; + string config_bucket = 1 [(google.api.field_behavior) = OPTIONAL]; // Optional. The shared Compute Engine config settings for // all instances in a cluster. - GceClusterConfig gce_cluster_config = 8; + GceClusterConfig gce_cluster_config = 8 [(google.api.field_behavior) = OPTIONAL]; // Optional. The Compute Engine config settings for // the master instance in a cluster. - InstanceGroupConfig master_config = 9; + InstanceGroupConfig master_config = 9 [(google.api.field_behavior) = OPTIONAL]; // Optional. The Compute Engine config settings for // worker instances in a cluster. - InstanceGroupConfig worker_config = 10; + InstanceGroupConfig worker_config = 10 [(google.api.field_behavior) = OPTIONAL]; // Optional. The Compute Engine config settings for // additional worker instances in a cluster. - InstanceGroupConfig secondary_worker_config = 12; + InstanceGroupConfig secondary_worker_config = 12 [(google.api.field_behavior) = OPTIONAL]; // Optional. The config settings for software inside the cluster. - SoftwareConfig software_config = 13; + SoftwareConfig software_config = 13 [(google.api.field_behavior) = OPTIONAL]; // Optional. The config setting for auto delete cluster schedule. - LifecycleConfig lifecycle_config = 14; + LifecycleConfig lifecycle_config = 14 [(google.api.field_behavior) = OPTIONAL]; // Optional. Commands to execute on each node after config is // completed. By default, executables are run on master and all worker nodes. @@ -169,31 +207,31 @@ message ClusterConfig { // else // ... worker specific actions ... // fi - repeated NodeInitializationAction initialization_actions = 11; + repeated NodeInitializationAction initialization_actions = 11 [(google.api.field_behavior) = OPTIONAL]; // Optional. 
Encryption settings for the cluster. - EncryptionConfig encryption_config = 15; + EncryptionConfig encryption_config = 15 [(google.api.field_behavior) = OPTIONAL]; // Optional. Autoscaling config for the policy associated with the cluster. // Cluster does not autoscale if this field is unset. - AutoscalingConfig autoscaling_config = 16; + AutoscalingConfig autoscaling_config = 16 [(google.api.field_behavior) = OPTIONAL]; // Optional. Port/endpoint configuration for this cluster - EndpointConfig endpoint_config = 17; + EndpointConfig endpoint_config = 17 [(google.api.field_behavior) = OPTIONAL]; // Optional. Security related configuration. - SecurityConfig security_config = 18; + SecurityConfig security_config = 18 [(google.api.field_behavior) = OPTIONAL]; } // Endpoint config for this cluster message EndpointConfig { // Output only. The map of port descriptions to URLs. Will only be populated // if enable_http_port_access is true. - map http_ports = 1; + map http_ports = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; // Optional. If true, enable http access to specific ports on the cluster // from external sources. Defaults to false. - bool enable_http_port_access = 2; + bool enable_http_port_access = 2 [(google.api.field_behavior) = OPTIONAL]; } // Autoscaling Policy config associated with the cluster. @@ -207,14 +245,14 @@ message AutoscalingConfig { // * `projects/[project_id]/locations/[dataproc_region]/autoscalingPolicies/[policy_id]` // // Note that the policy must be in the same project and Cloud Dataproc region. - string policy_uri = 1; + string policy_uri = 1 [(google.api.field_behavior) = OPTIONAL]; } // Encryption settings for the cluster. message EncryptionConfig { // Optional. The Cloud KMS key name to use for PD disk encryption for all // instances in the cluster. 
- string gce_pd_kms_key_name = 1; + string gce_pd_kms_key_name = 1 [(google.api.field_behavior) = OPTIONAL]; } // Common config settings for resources of Compute Engine cluster @@ -231,7 +269,7 @@ message GceClusterConfig { // * `https://www.googleapis.com/compute/v1/projects/[project_id]/zones/[zone]` // * `projects/[project_id]/zones/[zone]` // * `us-central1-f` - string zone_uri = 1; + string zone_uri = 1 [(google.api.field_behavior) = OPTIONAL]; // Optional. The Compute Engine network to be used for machine // communications. Cannot be specified with subnetwork_uri. If neither @@ -244,7 +282,7 @@ message GceClusterConfig { // * `https://www.googleapis.com/compute/v1/projects/[project_id]/regions/global/default` // * `projects/[project_id]/regions/global/default` // * `default` - string network_uri = 2; + string network_uri = 2 [(google.api.field_behavior) = OPTIONAL]; // Optional. The Compute Engine subnetwork to be used for machine // communications. Cannot be specified with network_uri. @@ -254,7 +292,7 @@ message GceClusterConfig { // * `https://www.googleapis.com/compute/v1/projects/[project_id]/regions/us-east1/subnetworks/sub0` // * `projects/[project_id]/regions/us-east1/subnetworks/sub0` // * `sub0` - string subnetwork_uri = 6; + string subnetwork_uri = 6 [(google.api.field_behavior) = OPTIONAL]; // Optional. If true, all instances in the cluster will only have internal IP // addresses. By default, clusters are not restricted to internal IP @@ -262,7 +300,7 @@ message GceClusterConfig { // instance. This `internal_ip_only` restriction can only be enabled for // subnetwork enabled networks, and all off-cluster dependencies must be // configured to be accessible without external IP addresses. - bool internal_ip_only = 7; + bool internal_ip_only = 7 [(google.api.field_behavior) = OPTIONAL]; // Optional. The service account of the instances. Defaults to the default // Compute Engine service account. 
Custom service accounts need @@ -275,7 +313,7 @@ message GceClusterConfig { // https://cloud.google.com/compute/docs/access/service-accounts#custom_service_accounts // for more information). // Example: `[account_id]@[project_id].iam.gserviceaccount.com` - string service_account = 8; + string service_account = 8 [(google.api.field_behavior) = OPTIONAL]; // Optional. The URIs of service account scopes to be included in // Compute Engine instances. The following base set of scopes is always @@ -291,7 +329,7 @@ message GceClusterConfig { // * https://www.googleapis.com/auth/bigtable.admin.table // * https://www.googleapis.com/auth/bigtable.data // * https://www.googleapis.com/auth/devstorage.full_control - repeated string service_account_scopes = 3; + repeated string service_account_scopes = 3 [(google.api.field_behavior) = OPTIONAL]; // The Compute Engine tags to add to all instances (see // [Tagging instances](/compute/docs/label-or-tag-resources#tags)). @@ -303,24 +341,24 @@ message GceClusterConfig { map metadata = 5; // Optional. Reservation Affinity for consuming Zonal reservation. - ReservationAffinity reservation_affinity = 11; + ReservationAffinity reservation_affinity = 11 [(google.api.field_behavior) = OPTIONAL]; } -// Optional. The config settings for Compute Engine resources in +// The config settings for Compute Engine resources in // an instance group, such as a master or worker group. message InstanceGroupConfig { // Optional. The number of VM instances in the instance group. // For master instance groups, must be set to 1. - int32 num_instances = 1; + int32 num_instances = 1 [(google.api.field_behavior) = OPTIONAL]; // Output only. The list of instance names. Cloud Dataproc derives the names // from `cluster_name`, `num_instances`, and the instance group. - repeated string instance_names = 2; + repeated string instance_names = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; // Optional. The Compute Engine image resource used for cluster // instances. 
It can be specified or may be inferred from // `SoftwareConfig.image_version`. - string image_uri = 3; + string image_uri = 3 [(google.api.field_behavior) = OPTIONAL]; // Optional. The Compute Engine machine type used for cluster instances. // @@ -335,28 +373,25 @@ message InstanceGroupConfig { // Placement](/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) // feature, you must use the short name of the machine type // resource, for example, `n1-standard-2`. - string machine_type_uri = 4; + string machine_type_uri = 4 [(google.api.field_behavior) = OPTIONAL]; // Optional. Disk option config settings. - DiskConfig disk_config = 5; + DiskConfig disk_config = 5 [(google.api.field_behavior) = OPTIONAL]; // Optional. Specifies that this instance group contains preemptible // instances. - bool is_preemptible = 6; + bool is_preemptible = 6 [(google.api.field_behavior) = OPTIONAL]; // Output only. The config for Compute Engine Instance Group // Manager that manages this group. // This is only used for preemptible instance groups. - ManagedGroupConfig managed_group_config = 7; + ManagedGroupConfig managed_group_config = 7 [(google.api.field_behavior) = OUTPUT_ONLY]; // Optional. The Compute Engine accelerator configuration for these // instances. - // - // **Beta Feature**: This feature is still under development. It may be - // changed before final release. - repeated AcceleratorConfig accelerators = 8; + repeated AcceleratorConfig accelerators = 8 [(google.api.field_behavior) = OPTIONAL]; - // Optional. Specifies the minimum cpu platform for the Instance Group. + // Specifies the minimum cpu platform for the Instance Group. // See [Cloud Dataproc→Minimum CPU Platform] // (/dataproc/docs/concepts/compute/dataproc-min-cpu). string min_cpu_platform = 9; @@ -366,10 +401,10 @@ message InstanceGroupConfig { message ManagedGroupConfig { // Output only. The name of the Instance Template used for the Managed // Instance Group. 
- string instance_template_name = 1; + string instance_template_name = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The name of the Instance Group Manager for this group. - string instance_group_manager_name = 2; + string instance_group_manager_name = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; } // Specifies the type and number of accelerator cards attached to the instances @@ -401,12 +436,12 @@ message DiskConfig { // Optional. Type of the boot disk (default is "pd-standard"). // Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or // "pd-standard" (Persistent Disk Hard Disk Drive). - string boot_disk_type = 3; + string boot_disk_type = 3 [(google.api.field_behavior) = OPTIONAL]; // Optional. Size in GB of the boot disk (default is 500GB). - int32 boot_disk_size_gb = 1; + int32 boot_disk_size_gb = 1 [(google.api.field_behavior) = OPTIONAL]; - // Optional. Number of attached SSDs, from 0 to 4 (default is 0). + // Number of attached SSDs, from 0 to 4 (default is 0). // If SSDs are not attached, the boot disk is used to store runtime logs and // [HDFS](https://hadoop.apache.org/docs/r1.2.1/hdfs_user_guide.html) data. // If one or more SSDs are attached, this runtime bulk @@ -423,9 +458,9 @@ message LifecycleConfig { // // Example: **"10m"**, the minimum value, to delete the // cluster when it has had no jobs running for 10 minutes. - google.protobuf.Duration idle_delete_ttl = 1; + google.protobuf.Duration idle_delete_ttl = 1 [(google.api.field_behavior) = OPTIONAL]; - // Optional. Either the exact time the cluster should be deleted at or + // Either the exact time the cluster should be deleted at or // the cluster maximum age. oneof ttl { // Optional. The time when cluster will be auto-deleted. @@ -437,6 +472,10 @@ message LifecycleConfig { // Example: **"1d"**, to delete the cluster 1 day after its creation.. google.protobuf.Duration auto_delete_ttl = 3; } + + // Output only. 
The time when cluster became idle (most recent job finished) + // and became eligible for deletion due to idleness. + google.protobuf.Timestamp idle_start_time = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; } // Security related configuration, including encryption, Kerberos, etc. @@ -448,79 +487,83 @@ message SecurityConfig { // Specifies Kerberos related configuration. message KerberosConfig { // Optional. Flag to indicate whether to Kerberize the cluster. - bool enable_kerberos = 1; + bool enable_kerberos = 1 [(google.api.field_behavior) = OPTIONAL]; // Required. The Cloud Storage URI of a KMS encrypted file containing the root // principal password. - string root_principal_password_uri = 2; + string root_principal_password_uri = 2 [(google.api.field_behavior) = REQUIRED]; // Required. The uri of the KMS key used to encrypt various sensitive // files. - string kms_key_uri = 3; + string kms_key_uri = 3 [(google.api.field_behavior) = REQUIRED]; // Optional. The Cloud Storage URI of the keystore file used for SSL // encryption. If not provided, Dataproc will provide a self-signed // certificate. - string keystore_uri = 4; + string keystore_uri = 4 [(google.api.field_behavior) = OPTIONAL]; // Optional. The Cloud Storage URI of the truststore file used for SSL // encryption. If not provided, Dataproc will provide a self-signed // certificate. - string truststore_uri = 5; + string truststore_uri = 5 [(google.api.field_behavior) = OPTIONAL]; // Optional. The Cloud Storage URI of a KMS encrypted file containing the // password to the user provided keystore. For the self-signed certificate, // this password is generated by Dataproc. - string keystore_password_uri = 6; + string keystore_password_uri = 6 [(google.api.field_behavior) = OPTIONAL]; // Optional. The Cloud Storage URI of a KMS encrypted file containing the // password to the user provided key. For the self-signed certificate, this // password is generated by Dataproc. 
- string key_password_uri = 7; + string key_password_uri = 7 [(google.api.field_behavior) = OPTIONAL]; // Optional. The Cloud Storage URI of a KMS encrypted file containing the // password to the user provided truststore. For the self-signed certificate, // this password is generated by Dataproc. - string truststore_password_uri = 8; + string truststore_password_uri = 8 [(google.api.field_behavior) = OPTIONAL]; // Optional. The remote realm the Dataproc on-cluster KDC will trust, should // the user enable cross realm trust. - string cross_realm_trust_realm = 9; + string cross_realm_trust_realm = 9 [(google.api.field_behavior) = OPTIONAL]; // Optional. The KDC (IP or hostname) for the remote trusted realm in a cross // realm trust relationship. - string cross_realm_trust_kdc = 10; + string cross_realm_trust_kdc = 10 [(google.api.field_behavior) = OPTIONAL]; // Optional. The admin server (IP or hostname) for the remote trusted realm in // a cross realm trust relationship. - string cross_realm_trust_admin_server = 11; + string cross_realm_trust_admin_server = 11 [(google.api.field_behavior) = OPTIONAL]; // Optional. The Cloud Storage URI of a KMS encrypted file containing the // shared password between the on-cluster Kerberos realm and the remote // trusted realm, in a cross realm trust relationship. - string cross_realm_trust_shared_password_uri = 12; + string cross_realm_trust_shared_password_uri = 12 [(google.api.field_behavior) = OPTIONAL]; // Optional. The Cloud Storage URI of a KMS encrypted file containing the // master key of the KDC database. - string kdc_db_key_uri = 13; + string kdc_db_key_uri = 13 [(google.api.field_behavior) = OPTIONAL]; // Optional. The lifetime of the ticket granting ticket, in hours. // If not specified, or user specifies 0, then default value 10 // will be used. - int32 tgt_lifetime_hours = 14; + int32 tgt_lifetime_hours = 14 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The name of the on-cluster Kerberos realm. 
+ // If not specified, the uppercased domain of hostnames will be the realm. + string realm = 15 [(google.api.field_behavior) = OPTIONAL]; } // Specifies an executable to run on a fully configured node and a // timeout period for executable completion. message NodeInitializationAction { // Required. Cloud Storage URI of executable file. - string executable_file = 1; + string executable_file = 1 [(google.api.field_behavior) = REQUIRED]; // Optional. Amount of time executable has to complete. Default is // 10 minutes. Cluster creation fails with an explanatory error message (the // name of the executable that caused the error and the exceeded timeout // period) if the executable is not completed at end of the timeout period. - google.protobuf.Duration execution_timeout = 2; + google.protobuf.Duration execution_timeout = 2 [(google.api.field_behavior) = OPTIONAL]; } // The status of a cluster and its instances. @@ -566,17 +609,17 @@ message ClusterStatus { } // Output only. The cluster's state. - State state = 1; + State state = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Optional details of cluster's state. - string detail = 2; + string detail = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Time when this state was entered. - google.protobuf.Timestamp state_start_time = 3; + google.protobuf.Timestamp state_start_time = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Additional state information that includes // status reported by the agent. - Substate substate = 4; + Substate substate = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; } // Specifies the selection and config of software inside the cluster. @@ -588,7 +631,7 @@ message SoftwareConfig { // ["preview" // version](/dataproc/docs/concepts/versioning/dataproc-versions#other_versions). // If unspecified, it defaults to the latest Debian version. - string image_version = 1; + string image_version = 1 [(google.api.field_behavior) = OPTIONAL]; // Optional. 
The properties to set on daemon config files. // @@ -608,7 +651,7 @@ message SoftwareConfig { // // For more information, see // [Cluster properties](/dataproc/docs/concepts/cluster-properties). - map properties = 2; + map properties = 2 [(google.api.field_behavior) = OPTIONAL]; // The set of optional components to activate on the cluster. repeated Component optional_components = 3; @@ -630,13 +673,13 @@ message ClusterMetrics { message CreateClusterRequest { // Required. The ID of the Google Cloud Platform project that the cluster // belongs to. - string project_id = 1; + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The Cloud Dataproc region in which to handle the request. - string region = 3; + string region = 3 [(google.api.field_behavior) = REQUIRED]; // Required. The cluster to create. - Cluster cluster = 2; + Cluster cluster = 2 [(google.api.field_behavior) = REQUIRED]; // Optional. A unique id used to identify the request. If the server // receives two [CreateClusterRequest][google.cloud.dataproc.v1beta2.CreateClusterRequest] requests with the same @@ -649,23 +692,23 @@ message CreateClusterRequest { // // The id must contain only letters (a-z, A-Z), numbers (0-9), // underscores (_), and hyphens (-). The maximum length is 40 characters. - string request_id = 4; + string request_id = 4 [(google.api.field_behavior) = OPTIONAL]; } // A request to update a cluster. message UpdateClusterRequest { // Required. The ID of the Google Cloud Platform project the // cluster belongs to. - string project_id = 1; + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The Cloud Dataproc region in which to handle the request. - string region = 5; + string region = 5 [(google.api.field_behavior) = REQUIRED]; // Required. The cluster name. - string cluster_name = 2; + string cluster_name = 2 [(google.api.field_behavior) = REQUIRED]; // Required. The changes to the cluster. 
- Cluster cluster = 3; + Cluster cluster = 3 [(google.api.field_behavior) = REQUIRED]; // Optional. Timeout for graceful YARN decomissioning. Graceful // decommissioning allows removing nodes from the cluster without @@ -675,7 +718,7 @@ message UpdateClusterRequest { // the maximum allowed timeout is 1 day. // // Only supported on Dataproc image versions 1.2 and higher. - google.protobuf.Duration graceful_decommission_timeout = 6; + google.protobuf.Duration graceful_decommission_timeout = 6 [(google.api.field_behavior) = OPTIONAL]; // Required. Specifies the path, relative to `Cluster`, of // the field to update. For example, to change the number of workers @@ -737,7 +780,7 @@ message UpdateClusterRequest { // autoscaling policies // // - google.protobuf.FieldMask update_mask = 4; + google.protobuf.FieldMask update_mask = 4 [(google.api.field_behavior) = REQUIRED]; // Optional. A unique id used to identify the request. If the server // receives two [UpdateClusterRequest][google.cloud.dataproc.v1beta2.UpdateClusterRequest] requests with the same @@ -750,24 +793,24 @@ message UpdateClusterRequest { // // The id must contain only letters (a-z, A-Z), numbers (0-9), // underscores (_), and hyphens (-). The maximum length is 40 characters. - string request_id = 7; + string request_id = 7 [(google.api.field_behavior) = OPTIONAL]; } // A request to delete a cluster. message DeleteClusterRequest { // Required. The ID of the Google Cloud Platform project that the cluster // belongs to. - string project_id = 1; + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The Cloud Dataproc region in which to handle the request. - string region = 3; + string region = 3 [(google.api.field_behavior) = REQUIRED]; // Required. The cluster name. - string cluster_name = 2; + string cluster_name = 2 [(google.api.field_behavior) = REQUIRED]; // Optional. 
Specifying the `cluster_uuid` means the RPC should fail // (with error NOT_FOUND) if cluster with specified UUID does not exist. - string cluster_uuid = 4; + string cluster_uuid = 4 [(google.api.field_behavior) = OPTIONAL]; // Optional. A unique id used to identify the request. If the server // receives two [DeleteClusterRequest][google.cloud.dataproc.v1beta2.DeleteClusterRequest] requests with the same @@ -780,32 +823,32 @@ message DeleteClusterRequest { // // The id must contain only letters (a-z, A-Z), numbers (0-9), // underscores (_), and hyphens (-). The maximum length is 40 characters. - string request_id = 5; + string request_id = 5 [(google.api.field_behavior) = OPTIONAL]; } // Request to get the resource representation for a cluster in a project. message GetClusterRequest { // Required. The ID of the Google Cloud Platform project that the cluster // belongs to. - string project_id = 1; + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The Cloud Dataproc region in which to handle the request. - string region = 3; + string region = 3 [(google.api.field_behavior) = REQUIRED]; // Required. The cluster name. - string cluster_name = 2; + string cluster_name = 2 [(google.api.field_behavior) = REQUIRED]; } // A request to list the clusters in a project. message ListClustersRequest { // Required. The ID of the Google Cloud Platform project that the cluster // belongs to. - string project_id = 1; + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The Cloud Dataproc region in which to handle the request. - string region = 4; + string region = 4 [(google.api.field_behavior) = REQUIRED]; - // Optional. A filter constraining the clusters to list. Filters are + // Optional. A filter constraining the clusters to list. Filters are // case-sensitive and have the following syntax: // // field = value [AND [field = value]] ... 
@@ -824,37 +867,37 @@ message ListClustersRequest { // // status.state = ACTIVE AND clusterName = mycluster // AND labels.env = staging AND labels.starred = * - string filter = 5; + string filter = 5 [(google.api.field_behavior) = OPTIONAL]; // Optional. The standard List page size. - int32 page_size = 2; + int32 page_size = 2 [(google.api.field_behavior) = OPTIONAL]; // Optional. The standard List page token. - string page_token = 3; + string page_token = 3 [(google.api.field_behavior) = OPTIONAL]; } // The list of all clusters in a project. message ListClustersResponse { // Output only. The clusters in the project. - repeated Cluster clusters = 1; + repeated Cluster clusters = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. This token is included in the response if there are more // results to fetch. To fetch additional results, provide this value as the // `page_token` in a subsequent ListClustersRequest. - string next_page_token = 2; + string next_page_token = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; } // A request to collect cluster diagnostic information. message DiagnoseClusterRequest { // Required. The ID of the Google Cloud Platform project that the cluster // belongs to. - string project_id = 1; + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The Cloud Dataproc region in which to handle the request. - string region = 3; + string region = 3 [(google.api.field_behavior) = REQUIRED]; // Required. The cluster name. - string cluster_name = 2; + string cluster_name = 2 [(google.api.field_behavior) = REQUIRED]; } // The location of diagnostic output. @@ -862,7 +905,7 @@ message DiagnoseClusterResults { // Output only. The Cloud Storage URI of the diagnostic output. // The output report is a plain text file with a summary of collected // diagnostics. - string output_uri = 1; + string output_uri = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; } // Reservation Affinity for consuming Zonal reservation. 
@@ -883,11 +926,11 @@ message ReservationAffinity { } // Optional. Type of reservation to consume - Type consume_reservation_type = 1; + Type consume_reservation_type = 1 [(google.api.field_behavior) = OPTIONAL]; // Optional. Corresponds to the label key of reservation resource. - string key = 2; + string key = 2 [(google.api.field_behavior) = OPTIONAL]; // Optional. Corresponds to the label values of reservation resource. - repeated string values = 3; + repeated string values = 3 [(google.api.field_behavior) = OPTIONAL]; } diff --git a/dataproc/google/cloud/dataproc_v1beta2/proto/clusters_pb2.py b/dataproc/google/cloud/dataproc_v1beta2/proto/clusters_pb2.py index bfe5208a5e7e..48f0feafeee9 100644 --- a/dataproc/google/cloud/dataproc_v1beta2/proto/clusters_pb2.py +++ b/dataproc/google/cloud/dataproc_v1beta2/proto/clusters_pb2.py @@ -17,6 +17,8 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.cloud.dataproc_v1beta2.proto import ( operations_pb2 as google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_operations__pb2, ) @@ -39,10 +41,12 @@ "\n!com.google.cloud.dataproc.v1beta2B\rClustersProtoP\001ZEgoogle.golang.org/genproto/googleapis/cloud/dataproc/v1beta2;dataproc" ), serialized_pb=_b( - '\n2google/cloud/dataproc_v1beta2/proto/clusters.proto\x12\x1dgoogle.cloud.dataproc.v1beta2\x1a\x1cgoogle/api/annotations.proto\x1a\x34google/cloud/dataproc_v1beta2/proto/operations.proto\x1a\x30google/cloud/dataproc_v1beta2/proto/shared.proto\x1a#google/longrunning/operations.proto\x1a\x1egoogle/protobuf/duration.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xbe\x03\n\x07\x43luster\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x14\n\x0c\x63luster_name\x18\x02 \x01(\t\x12<\n\x06\x63onfig\x18\x03 
\x01(\x0b\x32,.google.cloud.dataproc.v1beta2.ClusterConfig\x12\x42\n\x06labels\x18\x08 \x03(\x0b\x32\x32.google.cloud.dataproc.v1beta2.Cluster.LabelsEntry\x12<\n\x06status\x18\x04 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.ClusterStatus\x12\x44\n\x0estatus_history\x18\x07 \x03(\x0b\x32,.google.cloud.dataproc.v1beta2.ClusterStatus\x12\x14\n\x0c\x63luster_uuid\x18\x06 \x01(\t\x12>\n\x07metrics\x18\t \x01(\x0b\x32-.google.cloud.dataproc.v1beta2.ClusterMetrics\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xf3\x06\n\rClusterConfig\x12\x15\n\rconfig_bucket\x18\x01 \x01(\t\x12K\n\x12gce_cluster_config\x18\x08 \x01(\x0b\x32/.google.cloud.dataproc.v1beta2.GceClusterConfig\x12I\n\rmaster_config\x18\t \x01(\x0b\x32\x32.google.cloud.dataproc.v1beta2.InstanceGroupConfig\x12I\n\rworker_config\x18\n \x01(\x0b\x32\x32.google.cloud.dataproc.v1beta2.InstanceGroupConfig\x12S\n\x17secondary_worker_config\x18\x0c \x01(\x0b\x32\x32.google.cloud.dataproc.v1beta2.InstanceGroupConfig\x12\x46\n\x0fsoftware_config\x18\r \x01(\x0b\x32-.google.cloud.dataproc.v1beta2.SoftwareConfig\x12H\n\x10lifecycle_config\x18\x0e \x01(\x0b\x32..google.cloud.dataproc.v1beta2.LifecycleConfig\x12W\n\x16initialization_actions\x18\x0b \x03(\x0b\x32\x37.google.cloud.dataproc.v1beta2.NodeInitializationAction\x12J\n\x11\x65ncryption_config\x18\x0f \x01(\x0b\x32/.google.cloud.dataproc.v1beta2.EncryptionConfig\x12L\n\x12\x61utoscaling_config\x18\x10 \x01(\x0b\x32\x30.google.cloud.dataproc.v1beta2.AutoscalingConfig\x12\x46\n\x0f\x65ndpoint_config\x18\x11 \x01(\x0b\x32-.google.cloud.dataproc.v1beta2.EndpointConfig\x12\x46\n\x0fsecurity_config\x18\x12 \x01(\x0b\x32-.google.cloud.dataproc.v1beta2.SecurityConfig"\xb5\x01\n\x0e\x45ndpointConfig\x12P\n\nhttp_ports\x18\x01 \x03(\x0b\x32<.google.cloud.dataproc.v1beta2.EndpointConfig.HttpPortsEntry\x12\x1f\n\x17\x65nable_http_port_access\x18\x02 \x01(\x08\x1a\x30\n\x0eHttpPortsEntry\x12\x0b\n\x03key\x18\x01 
\x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\'\n\x11\x41utoscalingConfig\x12\x12\n\npolicy_uri\x18\x01 \x01(\t"/\n\x10\x45ncryptionConfig\x12\x1b\n\x13gce_pd_kms_key_name\x18\x01 \x01(\t"\x86\x03\n\x10GceClusterConfig\x12\x10\n\x08zone_uri\x18\x01 \x01(\t\x12\x13\n\x0bnetwork_uri\x18\x02 \x01(\t\x12\x16\n\x0esubnetwork_uri\x18\x06 \x01(\t\x12\x18\n\x10internal_ip_only\x18\x07 \x01(\x08\x12\x17\n\x0fservice_account\x18\x08 \x01(\t\x12\x1e\n\x16service_account_scopes\x18\x03 \x03(\t\x12\x0c\n\x04tags\x18\x04 \x03(\t\x12O\n\x08metadata\x18\x05 \x03(\x0b\x32=.google.cloud.dataproc.v1beta2.GceClusterConfig.MetadataEntry\x12P\n\x14reservation_affinity\x18\x0b \x01(\x0b\x32\x32.google.cloud.dataproc.v1beta2.ReservationAffinity\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xfc\x02\n\x13InstanceGroupConfig\x12\x15\n\rnum_instances\x18\x01 \x01(\x05\x12\x16\n\x0einstance_names\x18\x02 \x03(\t\x12\x11\n\timage_uri\x18\x03 \x01(\t\x12\x18\n\x10machine_type_uri\x18\x04 \x01(\t\x12>\n\x0b\x64isk_config\x18\x05 \x01(\x0b\x32).google.cloud.dataproc.v1beta2.DiskConfig\x12\x16\n\x0eis_preemptible\x18\x06 \x01(\x08\x12O\n\x14managed_group_config\x18\x07 \x01(\x0b\x32\x31.google.cloud.dataproc.v1beta2.ManagedGroupConfig\x12\x46\n\x0c\x61\x63\x63\x65lerators\x18\x08 \x03(\x0b\x32\x30.google.cloud.dataproc.v1beta2.AcceleratorConfig\x12\x18\n\x10min_cpu_platform\x18\t \x01(\t"Y\n\x12ManagedGroupConfig\x12\x1e\n\x16instance_template_name\x18\x01 \x01(\t\x12#\n\x1binstance_group_manager_name\x18\x02 \x01(\t"L\n\x11\x41\x63\x63\x65leratorConfig\x12\x1c\n\x14\x61\x63\x63\x65lerator_type_uri\x18\x01 \x01(\t\x12\x19\n\x11\x61\x63\x63\x65lerator_count\x18\x02 \x01(\x05"W\n\nDiskConfig\x12\x16\n\x0e\x62oot_disk_type\x18\x03 \x01(\t\x12\x19\n\x11\x62oot_disk_size_gb\x18\x01 \x01(\x05\x12\x16\n\x0enum_local_ssds\x18\x02 \x01(\x05"\xba\x01\n\x0fLifecycleConfig\x12\x32\n\x0fidle_delete_ttl\x18\x01 
\x01(\x0b\x32\x19.google.protobuf.Duration\x12\x36\n\x10\x61uto_delete_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x34\n\x0f\x61uto_delete_ttl\x18\x03 \x01(\x0b\x32\x19.google.protobuf.DurationH\x00\x42\x05\n\x03ttl"X\n\x0eSecurityConfig\x12\x46\n\x0fkerberos_config\x18\x01 \x01(\x0b\x32-.google.cloud.dataproc.v1beta2.KerberosConfig"\xb6\x03\n\x0eKerberosConfig\x12\x17\n\x0f\x65nable_kerberos\x18\x01 \x01(\x08\x12#\n\x1broot_principal_password_uri\x18\x02 \x01(\t\x12\x13\n\x0bkms_key_uri\x18\x03 \x01(\t\x12\x14\n\x0ckeystore_uri\x18\x04 \x01(\t\x12\x16\n\x0etruststore_uri\x18\x05 \x01(\t\x12\x1d\n\x15keystore_password_uri\x18\x06 \x01(\t\x12\x18\n\x10key_password_uri\x18\x07 \x01(\t\x12\x1f\n\x17truststore_password_uri\x18\x08 \x01(\t\x12\x1f\n\x17\x63ross_realm_trust_realm\x18\t \x01(\t\x12\x1d\n\x15\x63ross_realm_trust_kdc\x18\n \x01(\t\x12&\n\x1e\x63ross_realm_trust_admin_server\x18\x0b \x01(\t\x12-\n%cross_realm_trust_shared_password_uri\x18\x0c \x01(\t\x12\x16\n\x0ekdc_db_key_uri\x18\r \x01(\t\x12\x1a\n\x12tgt_lifetime_hours\x18\x0e \x01(\x05"i\n\x18NodeInitializationAction\x12\x17\n\x0f\x65xecutable_file\x18\x01 \x01(\t\x12\x34\n\x11\x65xecution_timeout\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration"\xf7\x02\n\rClusterStatus\x12\x41\n\x05state\x18\x01 \x01(\x0e\x32\x32.google.cloud.dataproc.v1beta2.ClusterStatus.State\x12\x0e\n\x06\x64\x65tail\x18\x02 \x01(\t\x12\x34\n\x10state_start_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12G\n\x08substate\x18\x04 \x01(\x0e\x32\x35.google.cloud.dataproc.v1beta2.ClusterStatus.Substate"V\n\x05State\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0c\n\x08\x43REATING\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\x12\t\n\x05\x45RROR\x10\x03\x12\x0c\n\x08\x44\x45LETING\x10\x04\x12\x0c\n\x08UPDATING\x10\x05"<\n\x08Substate\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\r\n\tUNHEALTHY\x10\x01\x12\x10\n\x0cSTALE_STATUS\x10\x02"\xf4\x01\n\x0eSoftwareConfig\x12\x15\n\rimage_version\x18\x01 \x01(\t\x12Q\n\nproperties\x18\x02 
\x03(\x0b\x32=.google.cloud.dataproc.v1beta2.SoftwareConfig.PropertiesEntry\x12\x45\n\x13optional_components\x18\x03 \x03(\x0e\x32(.google.cloud.dataproc.v1beta2.Component\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xa4\x02\n\x0e\x43lusterMetrics\x12T\n\x0chdfs_metrics\x18\x01 \x03(\x0b\x32>.google.cloud.dataproc.v1beta2.ClusterMetrics.HdfsMetricsEntry\x12T\n\x0cyarn_metrics\x18\x02 \x03(\x0b\x32>.google.cloud.dataproc.v1beta2.ClusterMetrics.YarnMetricsEntry\x1a\x32\n\x10HdfsMetricsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x03:\x02\x38\x01\x1a\x32\n\x10YarnMetricsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x03:\x02\x38\x01"\x87\x01\n\x14\x43reateClusterRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06region\x18\x03 \x01(\t\x12\x37\n\x07\x63luster\x18\x02 \x01(\x0b\x32&.google.cloud.dataproc.v1beta2.Cluster\x12\x12\n\nrequest_id\x18\x04 \x01(\t"\x90\x02\n\x14UpdateClusterRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06region\x18\x05 \x01(\t\x12\x14\n\x0c\x63luster_name\x18\x02 \x01(\t\x12\x37\n\x07\x63luster\x18\x03 \x01(\x0b\x32&.google.cloud.dataproc.v1beta2.Cluster\x12@\n\x1dgraceful_decommission_timeout\x18\x06 \x01(\x0b\x32\x19.google.protobuf.Duration\x12/\n\x0bupdate_mask\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\x12\x12\n\nrequest_id\x18\x07 \x01(\t"z\n\x14\x44\x65leteClusterRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06region\x18\x03 \x01(\t\x12\x14\n\x0c\x63luster_name\x18\x02 \x01(\t\x12\x14\n\x0c\x63luster_uuid\x18\x04 \x01(\t\x12\x12\n\nrequest_id\x18\x05 \x01(\t"M\n\x11GetClusterRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06region\x18\x03 \x01(\t\x12\x14\n\x0c\x63luster_name\x18\x02 \x01(\t"p\n\x13ListClustersRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06region\x18\x04 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x05 \x01(\t\x12\x11\n\tpage_size\x18\x02 
\x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"i\n\x14ListClustersResponse\x12\x38\n\x08\x63lusters\x18\x01 \x03(\x0b\x32&.google.cloud.dataproc.v1beta2.Cluster\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"R\n\x16\x44iagnoseClusterRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06region\x18\x03 \x01(\t\x12\x14\n\x0c\x63luster_name\x18\x02 \x01(\t",\n\x16\x44iagnoseClusterResults\x12\x12\n\noutput_uri\x18\x01 \x01(\t"\xee\x01\n\x13ReservationAffinity\x12Y\n\x18\x63onsume_reservation_type\x18\x01 \x01(\x0e\x32\x37.google.cloud.dataproc.v1beta2.ReservationAffinity.Type\x12\x0b\n\x03key\x18\x02 \x01(\t\x12\x0e\n\x06values\x18\x03 \x03(\t"_\n\x04Type\x12\x14\n\x10TYPE_UNSPECIFIED\x10\x00\x12\x12\n\x0eNO_RESERVATION\x10\x01\x12\x13\n\x0f\x41NY_RESERVATION\x10\x02\x12\x18\n\x14SPECIFIC_RESERVATION\x10\x03\x32\xf8\x08\n\x11\x43lusterController\x12\xae\x01\n\rCreateCluster\x12\x33.google.cloud.dataproc.v1beta2.CreateClusterRequest\x1a\x1d.google.longrunning.Operation"I\x82\xd3\xe4\x93\x02\x43"8/v1beta2/projects/{project_id}/regions/{region}/clusters:\x07\x63luster\x12\xbd\x01\n\rUpdateCluster\x12\x33.google.cloud.dataproc.v1beta2.UpdateClusterRequest\x1a\x1d.google.longrunning.Operation"X\x82\xd3\xe4\x93\x02R2G/v1beta2/projects/{project_id}/regions/{region}/clusters/{cluster_name}:\x07\x63luster\x12\xb4\x01\n\rDeleteCluster\x12\x33.google.cloud.dataproc.v1beta2.DeleteClusterRequest\x1a\x1d.google.longrunning.Operation"O\x82\xd3\xe4\x93\x02I*G/v1beta2/projects/{project_id}/regions/{region}/clusters/{cluster_name}\x12\xb7\x01\n\nGetCluster\x12\x30.google.cloud.dataproc.v1beta2.GetClusterRequest\x1a&.google.cloud.dataproc.v1beta2.Cluster"O\x82\xd3\xe4\x93\x02I\x12G/v1beta2/projects/{project_id}/regions/{region}/clusters/{cluster_name}\x12\xb9\x01\n\x0cListClusters\x12\x32.google.cloud.dataproc.v1beta2.ListClustersRequest\x1a\x33.google.cloud.dataproc.v1beta2.ListClustersResponse"@\x82\xd3\xe4\x93\x02:\x12\x38/v1beta2/projects/{project_id}/regions/{region}/clusters\x12
\xc4\x01\n\x0f\x44iagnoseCluster\x12\x35.google.cloud.dataproc.v1beta2.DiagnoseClusterRequest\x1a\x1d.google.longrunning.Operation"[\x82\xd3\xe4\x93\x02U"P/v1beta2/projects/{project_id}/regions/{region}/clusters/{cluster_name}:diagnose:\x01*B{\n!com.google.cloud.dataproc.v1beta2B\rClustersProtoP\x01ZEgoogle.golang.org/genproto/googleapis/cloud/dataproc/v1beta2;dataprocb\x06proto3' + '\n2google/cloud/dataproc_v1beta2/proto/clusters.proto\x12\x1dgoogle.cloud.dataproc.v1beta2\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x34google/cloud/dataproc_v1beta2/proto/operations.proto\x1a\x30google/cloud/dataproc_v1beta2/proto/shared.proto\x1a#google/longrunning/operations.proto\x1a\x1egoogle/protobuf/duration.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xe6\x03\n\x07\x43luster\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x41\n\x06\x63onfig\x18\x03 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.ClusterConfigB\x03\xe0\x41\x02\x12G\n\x06labels\x18\x08 \x03(\x0b\x32\x32.google.cloud.dataproc.v1beta2.Cluster.LabelsEntryB\x03\xe0\x41\x01\x12\x41\n\x06status\x18\x04 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.ClusterStatusB\x03\xe0\x41\x03\x12I\n\x0estatus_history\x18\x07 \x03(\x0b\x32,.google.cloud.dataproc.v1beta2.ClusterStatusB\x03\xe0\x41\x03\x12\x19\n\x0c\x63luster_uuid\x18\x06 \x01(\tB\x03\xe0\x41\x03\x12\x43\n\x07metrics\x18\t \x01(\x0b\x32-.google.cloud.dataproc.v1beta2.ClusterMetricsB\x03\xe0\x41\x03\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xaf\x07\n\rClusterConfig\x12\x1a\n\rconfig_bucket\x18\x01 \x01(\tB\x03\xe0\x41\x01\x12P\n\x12gce_cluster_config\x18\x08 \x01(\x0b\x32/.google.cloud.dataproc.v1beta2.GceClusterConfigB\x03\xe0\x41\x01\x12N\n\rmaster_config\x18\t 
\x01(\x0b\x32\x32.google.cloud.dataproc.v1beta2.InstanceGroupConfigB\x03\xe0\x41\x01\x12N\n\rworker_config\x18\n \x01(\x0b\x32\x32.google.cloud.dataproc.v1beta2.InstanceGroupConfigB\x03\xe0\x41\x01\x12X\n\x17secondary_worker_config\x18\x0c \x01(\x0b\x32\x32.google.cloud.dataproc.v1beta2.InstanceGroupConfigB\x03\xe0\x41\x01\x12K\n\x0fsoftware_config\x18\r \x01(\x0b\x32-.google.cloud.dataproc.v1beta2.SoftwareConfigB\x03\xe0\x41\x01\x12M\n\x10lifecycle_config\x18\x0e \x01(\x0b\x32..google.cloud.dataproc.v1beta2.LifecycleConfigB\x03\xe0\x41\x01\x12\\\n\x16initialization_actions\x18\x0b \x03(\x0b\x32\x37.google.cloud.dataproc.v1beta2.NodeInitializationActionB\x03\xe0\x41\x01\x12O\n\x11\x65ncryption_config\x18\x0f \x01(\x0b\x32/.google.cloud.dataproc.v1beta2.EncryptionConfigB\x03\xe0\x41\x01\x12Q\n\x12\x61utoscaling_config\x18\x10 \x01(\x0b\x32\x30.google.cloud.dataproc.v1beta2.AutoscalingConfigB\x03\xe0\x41\x01\x12K\n\x0f\x65ndpoint_config\x18\x11 \x01(\x0b\x32-.google.cloud.dataproc.v1beta2.EndpointConfigB\x03\xe0\x41\x01\x12K\n\x0fsecurity_config\x18\x12 \x01(\x0b\x32-.google.cloud.dataproc.v1beta2.SecurityConfigB\x03\xe0\x41\x01"\xbf\x01\n\x0e\x45ndpointConfig\x12U\n\nhttp_ports\x18\x01 \x03(\x0b\x32<.google.cloud.dataproc.v1beta2.EndpointConfig.HttpPortsEntryB\x03\xe0\x41\x03\x12$\n\x17\x65nable_http_port_access\x18\x02 \x01(\x08\x42\x03\xe0\x41\x01\x1a\x30\n\x0eHttpPortsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01",\n\x11\x41utoscalingConfig\x12\x17\n\npolicy_uri\x18\x01 \x01(\tB\x03\xe0\x41\x01"4\n\x10\x45ncryptionConfig\x12 \n\x13gce_pd_kms_key_name\x18\x01 \x01(\tB\x03\xe0\x41\x01"\xa9\x03\n\x10GceClusterConfig\x12\x15\n\x08zone_uri\x18\x01 \x01(\tB\x03\xe0\x41\x01\x12\x18\n\x0bnetwork_uri\x18\x02 \x01(\tB\x03\xe0\x41\x01\x12\x1b\n\x0esubnetwork_uri\x18\x06 \x01(\tB\x03\xe0\x41\x01\x12\x1d\n\x10internal_ip_only\x18\x07 \x01(\x08\x42\x03\xe0\x41\x01\x12\x1c\n\x0fservice_account\x18\x08 
\x01(\tB\x03\xe0\x41\x01\x12#\n\x16service_account_scopes\x18\x03 \x03(\tB\x03\xe0\x41\x01\x12\x0c\n\x04tags\x18\x04 \x03(\t\x12O\n\x08metadata\x18\x05 \x03(\x0b\x32=.google.cloud.dataproc.v1beta2.GceClusterConfig.MetadataEntry\x12U\n\x14reservation_affinity\x18\x0b \x01(\x0b\x32\x32.google.cloud.dataproc.v1beta2.ReservationAffinityB\x03\xe0\x41\x01\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xa4\x03\n\x13InstanceGroupConfig\x12\x1a\n\rnum_instances\x18\x01 \x01(\x05\x42\x03\xe0\x41\x01\x12\x1b\n\x0einstance_names\x18\x02 \x03(\tB\x03\xe0\x41\x03\x12\x16\n\timage_uri\x18\x03 \x01(\tB\x03\xe0\x41\x01\x12\x1d\n\x10machine_type_uri\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12\x43\n\x0b\x64isk_config\x18\x05 \x01(\x0b\x32).google.cloud.dataproc.v1beta2.DiskConfigB\x03\xe0\x41\x01\x12\x1b\n\x0eis_preemptible\x18\x06 \x01(\x08\x42\x03\xe0\x41\x01\x12T\n\x14managed_group_config\x18\x07 \x01(\x0b\x32\x31.google.cloud.dataproc.v1beta2.ManagedGroupConfigB\x03\xe0\x41\x03\x12K\n\x0c\x61\x63\x63\x65lerators\x18\x08 \x03(\x0b\x32\x30.google.cloud.dataproc.v1beta2.AcceleratorConfigB\x03\xe0\x41\x01\x12\x18\n\x10min_cpu_platform\x18\t \x01(\t"c\n\x12ManagedGroupConfig\x12#\n\x16instance_template_name\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12(\n\x1binstance_group_manager_name\x18\x02 \x01(\tB\x03\xe0\x41\x03"L\n\x11\x41\x63\x63\x65leratorConfig\x12\x1c\n\x14\x61\x63\x63\x65lerator_type_uri\x18\x01 \x01(\t\x12\x19\n\x11\x61\x63\x63\x65lerator_count\x18\x02 \x01(\x05"a\n\nDiskConfig\x12\x1b\n\x0e\x62oot_disk_type\x18\x03 \x01(\tB\x03\xe0\x41\x01\x12\x1e\n\x11\x62oot_disk_size_gb\x18\x01 \x01(\x05\x42\x03\xe0\x41\x01\x12\x16\n\x0enum_local_ssds\x18\x02 \x01(\x05"\xf9\x01\n\x0fLifecycleConfig\x12\x37\n\x0fidle_delete_ttl\x18\x01 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x01\x12\x36\n\x10\x61uto_delete_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x34\n\x0f\x61uto_delete_ttl\x18\x03 
\x01(\x0b\x32\x19.google.protobuf.DurationH\x00\x12\x38\n\x0fidle_start_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x42\x05\n\x03ttl"X\n\x0eSecurityConfig\x12\x46\n\x0fkerberos_config\x18\x01 \x01(\x0b\x32-.google.cloud.dataproc.v1beta2.KerberosConfig"\x90\x04\n\x0eKerberosConfig\x12\x1c\n\x0f\x65nable_kerberos\x18\x01 \x01(\x08\x42\x03\xe0\x41\x01\x12(\n\x1broot_principal_password_uri\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x18\n\x0bkms_key_uri\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0ckeystore_uri\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12\x1b\n\x0etruststore_uri\x18\x05 \x01(\tB\x03\xe0\x41\x01\x12"\n\x15keystore_password_uri\x18\x06 \x01(\tB\x03\xe0\x41\x01\x12\x1d\n\x10key_password_uri\x18\x07 \x01(\tB\x03\xe0\x41\x01\x12$\n\x17truststore_password_uri\x18\x08 \x01(\tB\x03\xe0\x41\x01\x12$\n\x17\x63ross_realm_trust_realm\x18\t \x01(\tB\x03\xe0\x41\x01\x12"\n\x15\x63ross_realm_trust_kdc\x18\n \x01(\tB\x03\xe0\x41\x01\x12+\n\x1e\x63ross_realm_trust_admin_server\x18\x0b \x01(\tB\x03\xe0\x41\x01\x12\x32\n%cross_realm_trust_shared_password_uri\x18\x0c \x01(\tB\x03\xe0\x41\x01\x12\x1b\n\x0ekdc_db_key_uri\x18\r \x01(\tB\x03\xe0\x41\x01\x12\x1f\n\x12tgt_lifetime_hours\x18\x0e \x01(\x05\x42\x03\xe0\x41\x01\x12\x12\n\x05realm\x18\x0f \x01(\tB\x03\xe0\x41\x01"s\n\x18NodeInitializationAction\x12\x1c\n\x0f\x65xecutable_file\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x39\n\x11\x65xecution_timeout\x18\x02 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x01"\x8b\x03\n\rClusterStatus\x12\x46\n\x05state\x18\x01 \x01(\x0e\x32\x32.google.cloud.dataproc.v1beta2.ClusterStatus.StateB\x03\xe0\x41\x03\x12\x13\n\x06\x64\x65tail\x18\x02 \x01(\tB\x03\xe0\x41\x03\x12\x39\n\x10state_start_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12L\n\x08substate\x18\x04 
\x01(\x0e\x32\x35.google.cloud.dataproc.v1beta2.ClusterStatus.SubstateB\x03\xe0\x41\x03"V\n\x05State\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0c\n\x08\x43REATING\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\x12\t\n\x05\x45RROR\x10\x03\x12\x0c\n\x08\x44\x45LETING\x10\x04\x12\x0c\n\x08UPDATING\x10\x05"<\n\x08Substate\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\r\n\tUNHEALTHY\x10\x01\x12\x10\n\x0cSTALE_STATUS\x10\x02"\xfe\x01\n\x0eSoftwareConfig\x12\x1a\n\rimage_version\x18\x01 \x01(\tB\x03\xe0\x41\x01\x12V\n\nproperties\x18\x02 \x03(\x0b\x32=.google.cloud.dataproc.v1beta2.SoftwareConfig.PropertiesEntryB\x03\xe0\x41\x01\x12\x45\n\x13optional_components\x18\x03 \x03(\x0e\x32(.google.cloud.dataproc.v1beta2.Component\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xa4\x02\n\x0e\x43lusterMetrics\x12T\n\x0chdfs_metrics\x18\x01 \x03(\x0b\x32>.google.cloud.dataproc.v1beta2.ClusterMetrics.HdfsMetricsEntry\x12T\n\x0cyarn_metrics\x18\x02 \x03(\x0b\x32>.google.cloud.dataproc.v1beta2.ClusterMetrics.YarnMetricsEntry\x1a\x32\n\x10HdfsMetricsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x03:\x02\x38\x01\x1a\x32\n\x10YarnMetricsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x03:\x02\x38\x01"\x9b\x01\n\x14\x43reateClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12<\n\x07\x63luster\x18\x02 \x01(\x0b\x32&.google.cloud.dataproc.v1beta2.ClusterB\x03\xe0\x41\x02\x12\x17\n\nrequest_id\x18\x04 \x01(\tB\x03\xe0\x41\x01"\xb3\x02\n\x14UpdateClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x05 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12<\n\x07\x63luster\x18\x03 \x01(\x0b\x32&.google.cloud.dataproc.v1beta2.ClusterB\x03\xe0\x41\x02\x12\x45\n\x1dgraceful_decommission_timeout\x18\x06 
\x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x01\x12\x34\n\x0bupdate_mask\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02\x12\x17\n\nrequest_id\x18\x07 \x01(\tB\x03\xe0\x41\x01"\x93\x01\n\x14\x44\x65leteClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_uuid\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12\x17\n\nrequest_id\x18\x05 \x01(\tB\x03\xe0\x41\x01"\\\n\x11GetClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02"\x89\x01\n\x13ListClustersRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x04 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06\x66ilter\x18\x05 \x01(\tB\x03\xe0\x41\x01\x12\x16\n\tpage_size\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01\x12\x17\n\npage_token\x18\x03 \x01(\tB\x03\xe0\x41\x01"s\n\x14ListClustersResponse\x12=\n\x08\x63lusters\x18\x01 \x03(\x0b\x32&.google.cloud.dataproc.v1beta2.ClusterB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03"a\n\x16\x44iagnoseClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02"1\n\x16\x44iagnoseClusterResults\x12\x17\n\noutput_uri\x18\x01 \x01(\tB\x03\xe0\x41\x03"\xfd\x01\n\x13ReservationAffinity\x12^\n\x18\x63onsume_reservation_type\x18\x01 \x01(\x0e\x32\x37.google.cloud.dataproc.v1beta2.ReservationAffinity.TypeB\x03\xe0\x41\x01\x12\x10\n\x03key\x18\x02 \x01(\tB\x03\xe0\x41\x01\x12\x13\n\x06values\x18\x03 
\x03(\tB\x03\xe0\x41\x01"_\n\x04Type\x12\x14\n\x10TYPE_UNSPECIFIED\x10\x00\x12\x12\n\x0eNO_RESERVATION\x10\x01\x12\x13\n\x0f\x41NY_RESERVATION\x10\x02\x12\x18\n\x14SPECIFIC_RESERVATION\x10\x03\x32\xe7\r\n\x11\x43lusterController\x12\x91\x02\n\rCreateCluster\x12\x33.google.cloud.dataproc.v1beta2.CreateClusterRequest\x1a\x1d.google.longrunning.Operation"\xab\x01\x82\xd3\xe4\x93\x02\x43"8/v1beta2/projects/{project_id}/regions/{region}/clusters:\x07\x63luster\xda\x41\x1bproject_id, region, cluster\xca\x41\x41\n\x07\x43luster\x12\x36google.cloud.dataproc.v1beta2.ClusterOperationMetadata\x12\xbb\x02\n\rUpdateCluster\x12\x33.google.cloud.dataproc.v1beta2.UpdateClusterRequest\x1a\x1d.google.longrunning.Operation"\xd5\x01\x82\xd3\xe4\x93\x02R2G/v1beta2/projects/{project_id}/regions/{region}/clusters/{cluster_name}:\x07\x63luster\xda\x41\x36project_id, region, cluster_name, cluster, update_mask\xca\x41\x41\n\x07\x43luster\x12\x36google.cloud.dataproc.v1beta2.ClusterOperationMetadata\x12\xaa\x02\n\rDeleteCluster\x12\x33.google.cloud.dataproc.v1beta2.DeleteClusterRequest\x1a\x1d.google.longrunning.Operation"\xc4\x01\x82\xd3\xe4\x93\x02I*G/v1beta2/projects/{project_id}/regions/{region}/clusters/{cluster_name}\xda\x41 project_id, region, cluster_name\xca\x41O\n\x15google.protobuf.Empty\x12\x36google.cloud.dataproc.v1beta2.ClusterOperationMetadata\x12\xda\x01\n\nGetCluster\x12\x30.google.cloud.dataproc.v1beta2.GetClusterRequest\x1a&.google.cloud.dataproc.v1beta2.Cluster"r\x82\xd3\xe4\x93\x02I\x12G/v1beta2/projects/{project_id}/regions/{region}/clusters/{cluster_name}\xda\x41 project_id, region, cluster_name\x12\xeb\x01\n\x0cListClusters\x12\x32.google.cloud.dataproc.v1beta2.ListClustersRequest\x1a\x33.google.cloud.dataproc.v1beta2.ListClustersResponse"r\x82\xd3\xe4\x93\x02:\x12\x38/v1beta2/projects/{project_id}/regions/{region}/clusters\xda\x41\x12project_id, region\xda\x41\x1aproject_id, region, 
filter\x12\xba\x02\n\x0f\x44iagnoseCluster\x12\x35.google.cloud.dataproc.v1beta2.DiagnoseClusterRequest\x1a\x1d.google.longrunning.Operation"\xd0\x01\x82\xd3\xe4\x93\x02U"P/v1beta2/projects/{project_id}/regions/{region}/clusters/{cluster_name}:diagnose:\x01*\xda\x41 project_id, region, cluster_name\xca\x41O\n\x15google.protobuf.Empty\x12\x36google.cloud.dataproc.v1beta2.ClusterOperationMetadata\x1aK\xca\x41\x17\x64\x61taproc.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB{\n!com.google.cloud.dataproc.v1beta2B\rClustersProtoP\x01ZEgoogle.golang.org/genproto/googleapis/cloud/dataproc/v1beta2;dataprocb\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_operations__pb2.DESCRIPTOR, google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_shared__pb2.DESCRIPTOR, google_dot_longrunning_dot_operations__pb2.DESCRIPTOR, @@ -80,8 +84,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=4053, - serialized_end=4139, + serialized_start=4509, + serialized_end=4595, ) _sym_db.RegisterEnumDescriptor(_CLUSTERSTATUS_STATE) @@ -103,8 +107,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=4141, - serialized_end=4201, + serialized_start=4597, + serialized_end=4657, ) _sym_db.RegisterEnumDescriptor(_CLUSTERSTATUS_SUBSTATE) @@ -141,8 +145,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=5856, - serialized_end=5951, + serialized_start=6489, + serialized_end=6584, ) _sym_db.RegisterEnumDescriptor(_RESERVATIONAFFINITY_TYPE) @@ -199,8 +203,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=757, - serialized_end=802, + serialized_start=855, + serialized_end=900, ) _CLUSTER = _descriptor.Descriptor( @@ -225,7 +229,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + 
serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -243,7 +247,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -261,7 +265,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -279,7 +283,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -297,7 +301,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -315,7 +319,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -333,7 +337,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -351,7 +355,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -363,8 +367,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=356, - serialized_end=802, + serialized_start=414, + serialized_end=900, ) @@ -390,7 +394,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -408,7 +412,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), 
_descriptor.FieldDescriptor( @@ -426,7 +430,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -444,7 +448,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -462,7 +466,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -480,7 +484,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -498,7 +502,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -516,7 +520,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -534,7 +538,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -552,7 +556,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -570,7 +574,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -588,7 +592,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -600,8 +604,8 
@@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=805, - serialized_end=1688, + serialized_start=903, + serialized_end=1846, ) @@ -657,8 +661,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1824, - serialized_end=1872, + serialized_start=1992, + serialized_end=2040, ) _ENDPOINTCONFIG = _descriptor.Descriptor( @@ -683,7 +687,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -701,7 +705,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -713,8 +717,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1691, - serialized_end=1872, + serialized_start=1849, + serialized_end=2040, ) @@ -740,7 +744,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ) ], @@ -752,8 +756,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1874, - serialized_end=1913, + serialized_start=2042, + serialized_end=2086, ) @@ -779,7 +783,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ) ], @@ -791,8 +795,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1915, - serialized_end=1962, + serialized_start=2088, + serialized_end=2140, ) @@ -848,8 +852,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2308, - serialized_end=2355, + serialized_start=2521, + serialized_end=2568, ) _GCECLUSTERCONFIG = _descriptor.Descriptor( @@ -874,7 +878,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -892,7 +896,7 @@ 
containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -910,7 +914,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -928,7 +932,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -946,7 +950,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -964,7 +968,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1018,7 +1022,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -1030,8 +1034,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1965, - serialized_end=2355, + serialized_start=2143, + serialized_end=2568, ) @@ -1057,7 +1061,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1075,7 +1079,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1093,7 +1097,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1111,7 +1115,7 @@ containing_type=None, is_extension=False, extension_scope=None, 
- serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1129,7 +1133,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1147,7 +1151,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1165,7 +1169,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1183,7 +1187,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1213,8 +1217,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2358, - serialized_end=2738, + serialized_start=2571, + serialized_end=2991, ) @@ -1240,7 +1244,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1258,7 +1262,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -1270,8 +1274,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2740, - serialized_end=2829, + serialized_start=2993, + serialized_end=3092, ) @@ -1327,8 +1331,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2831, - serialized_end=2907, + serialized_start=3094, + serialized_end=3170, ) @@ -1354,7 +1358,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ 
-1372,7 +1376,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1402,8 +1406,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2909, - serialized_end=2996, + serialized_start=3172, + serialized_end=3269, ) @@ -1429,7 +1433,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1468,6 +1472,24 @@ serialized_options=None, file=DESCRIPTOR, ), + _descriptor.FieldDescriptor( + name="idle_start_time", + full_name="google.cloud.dataproc.v1beta2.LifecycleConfig.idle_start_time", + index=3, + number=4, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\003"), + file=DESCRIPTOR, + ), ], extensions=[], nested_types=[], @@ -1485,8 +1507,8 @@ fields=[], ) ], - serialized_start=2999, - serialized_end=3185, + serialized_start=3272, + serialized_end=3521, ) @@ -1524,8 +1546,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3187, - serialized_end=3275, + serialized_start=3523, + serialized_end=3611, ) @@ -1551,7 +1573,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1569,7 +1591,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1587,7 +1609,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1605,7 
+1627,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1623,7 +1645,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1641,7 +1663,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1659,7 +1681,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1677,7 +1699,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1695,7 +1717,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1713,7 +1735,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1731,7 +1753,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1749,7 +1771,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1767,7 +1789,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1785,7 
+1807,25 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="realm", + full_name="google.cloud.dataproc.v1beta2.KerberosConfig.realm", + index=14, + number=15, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -1797,8 +1837,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3278, - serialized_end=3716, + serialized_start=3614, + serialized_end=4142, ) @@ -1824,7 +1864,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1842,7 +1882,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -1854,8 +1894,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3718, - serialized_end=3823, + serialized_start=4144, + serialized_end=4259, ) @@ -1881,7 +1921,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1899,7 +1939,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1917,7 +1957,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1935,7 +1975,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, 
+ serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -1947,8 +1987,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3826, - serialized_end=4201, + serialized_start=4262, + serialized_end=4657, ) @@ -2004,8 +2044,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4399, - serialized_end=4448, + serialized_start=4865, + serialized_end=4914, ) _SOFTWARECONFIG = _descriptor.Descriptor( @@ -2030,7 +2070,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2048,7 +2088,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2078,8 +2118,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4204, - serialized_end=4448, + serialized_start=4660, + serialized_end=4914, ) @@ -2135,8 +2175,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4641, - serialized_end=4691, + serialized_start=5107, + serialized_end=5157, ) _CLUSTERMETRICS_YARNMETRICSENTRY = _descriptor.Descriptor( @@ -2191,8 +2231,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4693, - serialized_end=4743, + serialized_start=5159, + serialized_end=5209, ) _CLUSTERMETRICS = _descriptor.Descriptor( @@ -2247,8 +2287,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4451, - serialized_end=4743, + serialized_start=4917, + serialized_end=5209, ) @@ -2274,7 +2314,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2292,7 +2332,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), 
_descriptor.FieldDescriptor( @@ -2310,7 +2350,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2328,7 +2368,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -2340,8 +2380,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4746, - serialized_end=4881, + serialized_start=5212, + serialized_end=5367, ) @@ -2367,7 +2407,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2385,7 +2425,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2403,7 +2443,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2421,7 +2461,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2439,7 +2479,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2457,7 +2497,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2475,7 +2515,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -2487,8 +2527,8 @@ syntax="proto3", 
extension_ranges=[], oneofs=[], - serialized_start=4884, - serialized_end=5156, + serialized_start=5370, + serialized_end=5677, ) @@ -2514,7 +2554,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2532,7 +2572,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2550,7 +2590,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2568,7 +2608,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2586,7 +2626,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -2598,8 +2638,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5158, - serialized_end=5280, + serialized_start=5680, + serialized_end=5827, ) @@ -2625,7 +2665,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2643,7 +2683,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2661,7 +2701,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -2673,8 +2713,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5282, - serialized_end=5359, + serialized_start=5829, + 
serialized_end=5921, ) @@ -2700,7 +2740,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2718,7 +2758,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2736,7 +2776,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2754,7 +2794,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2772,7 +2812,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -2784,8 +2824,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5361, - serialized_end=5473, + serialized_start=5924, + serialized_end=6061, ) @@ -2811,7 +2851,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2829,7 +2869,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -2841,8 +2881,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5475, - serialized_end=5580, + serialized_start=6063, + serialized_end=6178, ) @@ -2868,7 +2908,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2886,7 +2926,7 @@ containing_type=None, is_extension=False, extension_scope=None, - 
serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2904,7 +2944,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -2916,8 +2956,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5582, - serialized_end=5664, + serialized_start=6180, + serialized_end=6277, ) @@ -2943,7 +2983,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ) ], @@ -2955,8 +2995,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5666, - serialized_end=5710, + serialized_start=6279, + serialized_end=6328, ) @@ -2982,7 +3022,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -3000,7 +3040,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -3018,7 +3058,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -3030,8 +3070,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5713, - serialized_end=5951, + serialized_start=6331, + serialized_end=6584, ) _CLUSTER_LABELSENTRY.containing_type = _CLUSTER @@ -3080,6 +3120,9 @@ _LIFECYCLECONFIG.fields_by_name[ "auto_delete_ttl" ].message_type = google_dot_protobuf_dot_duration__pb2._DURATION +_LIFECYCLECONFIG.fields_by_name[ + "idle_start_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP _LIFECYCLECONFIG.oneofs_by_name["ttl"].fields.append( _LIFECYCLECONFIG.fields_by_name["auto_delete_time"] ) @@ -3458,8 +3501,8 @@ dict( 
DESCRIPTOR=_INSTANCEGROUPCONFIG, __module__="google.cloud.dataproc_v1beta2.proto.clusters_pb2", - __doc__="""Optional. The config settings for Compute Engine resources in an - instance group, such as a master or worker group. + __doc__="""The config settings for Compute Engine resources in an instance group, + such as a master or worker group. Attributes: @@ -3497,11 +3540,10 @@ preemptible instance groups. accelerators: Optional. The Compute Engine accelerator configuration for - these instances. **Beta Feature**: This feature is still - under development. It may be changed before final release. + these instances. min_cpu_platform: - Optional. Specifies the minimum cpu platform for the Instance - Group. See [Cloud Dataproc→Minimum CPU Platform] + Specifies the minimum cpu platform for the Instance Group. See + [Cloud Dataproc→Minimum CPU Platform] (/dataproc/docs/concepts/compute/dataproc-min-cpu). """, # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.InstanceGroupConfig) @@ -3584,13 +3626,12 @@ boot_disk_size_gb: Optional. Size in GB of the boot disk (default is 500GB). num_local_ssds: - Optional. Number of attached SSDs, from 0 to 4 (default is 0). - If SSDs are not attached, the boot disk is used to store - runtime logs and `HDFS `__ data. If one or more SSDs are - attached, this runtime bulk data is spread across them, and - the boot disk contains only basic config and installed - binaries. + Number of attached SSDs, from 0 to 4 (default is 0). If SSDs + are not attached, the boot disk is used to store runtime logs + and `HDFS `__ data. If one or more SSDs are attached, this + runtime bulk data is spread across them, and the boot disk + contains only basic config and installed binaries. """, # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.DiskConfig) ), @@ -3614,8 +3655,8 @@ value, to delete the cluster when it has had no jobs running for 10 minutes. ttl: - Optional. 
Either the exact time the cluster should be deleted - at or the cluster maximum age. + Either the exact time the cluster should be deleted at or the + cluster maximum age. auto_delete_time: Optional. The time when cluster will be auto-deleted. auto_delete_ttl: @@ -3623,6 +3664,10 @@ be auto-deleted at the end of this period. Valid range: **[10m, 14d]**. Example: **"1d"**, to delete the cluster 1 day after its creation.. + idle_start_time: + Output only. The time when cluster became idle (most recent + job finished) and became eligible for deletion due to + idleness. """, # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.LifecycleConfig) ), @@ -3709,6 +3754,10 @@ Optional. The lifetime of the ticket granting ticket, in hours. If not specified, or user specifies 0, then default value 10 will be used. + realm: + Optional. The name of the on-cluster Kerberos realm. If not + specified, the uppercased domain of hostnames will be the + realm. """, # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.KerberosConfig) ), @@ -4186,20 +4235,123 @@ DESCRIPTOR._options = None _CLUSTER_LABELSENTRY._options = None +_CLUSTER.fields_by_name["project_id"]._options = None +_CLUSTER.fields_by_name["cluster_name"]._options = None +_CLUSTER.fields_by_name["config"]._options = None +_CLUSTER.fields_by_name["labels"]._options = None +_CLUSTER.fields_by_name["status"]._options = None +_CLUSTER.fields_by_name["status_history"]._options = None +_CLUSTER.fields_by_name["cluster_uuid"]._options = None +_CLUSTER.fields_by_name["metrics"]._options = None +_CLUSTERCONFIG.fields_by_name["config_bucket"]._options = None +_CLUSTERCONFIG.fields_by_name["gce_cluster_config"]._options = None +_CLUSTERCONFIG.fields_by_name["master_config"]._options = None +_CLUSTERCONFIG.fields_by_name["worker_config"]._options = None +_CLUSTERCONFIG.fields_by_name["secondary_worker_config"]._options = None +_CLUSTERCONFIG.fields_by_name["software_config"]._options = None 
+_CLUSTERCONFIG.fields_by_name["lifecycle_config"]._options = None +_CLUSTERCONFIG.fields_by_name["initialization_actions"]._options = None +_CLUSTERCONFIG.fields_by_name["encryption_config"]._options = None +_CLUSTERCONFIG.fields_by_name["autoscaling_config"]._options = None +_CLUSTERCONFIG.fields_by_name["endpoint_config"]._options = None +_CLUSTERCONFIG.fields_by_name["security_config"]._options = None _ENDPOINTCONFIG_HTTPPORTSENTRY._options = None +_ENDPOINTCONFIG.fields_by_name["http_ports"]._options = None +_ENDPOINTCONFIG.fields_by_name["enable_http_port_access"]._options = None +_AUTOSCALINGCONFIG.fields_by_name["policy_uri"]._options = None +_ENCRYPTIONCONFIG.fields_by_name["gce_pd_kms_key_name"]._options = None _GCECLUSTERCONFIG_METADATAENTRY._options = None +_GCECLUSTERCONFIG.fields_by_name["zone_uri"]._options = None +_GCECLUSTERCONFIG.fields_by_name["network_uri"]._options = None +_GCECLUSTERCONFIG.fields_by_name["subnetwork_uri"]._options = None +_GCECLUSTERCONFIG.fields_by_name["internal_ip_only"]._options = None +_GCECLUSTERCONFIG.fields_by_name["service_account"]._options = None +_GCECLUSTERCONFIG.fields_by_name["service_account_scopes"]._options = None +_GCECLUSTERCONFIG.fields_by_name["reservation_affinity"]._options = None +_INSTANCEGROUPCONFIG.fields_by_name["num_instances"]._options = None +_INSTANCEGROUPCONFIG.fields_by_name["instance_names"]._options = None +_INSTANCEGROUPCONFIG.fields_by_name["image_uri"]._options = None +_INSTANCEGROUPCONFIG.fields_by_name["machine_type_uri"]._options = None +_INSTANCEGROUPCONFIG.fields_by_name["disk_config"]._options = None +_INSTANCEGROUPCONFIG.fields_by_name["is_preemptible"]._options = None +_INSTANCEGROUPCONFIG.fields_by_name["managed_group_config"]._options = None +_INSTANCEGROUPCONFIG.fields_by_name["accelerators"]._options = None +_MANAGEDGROUPCONFIG.fields_by_name["instance_template_name"]._options = None +_MANAGEDGROUPCONFIG.fields_by_name["instance_group_manager_name"]._options = None 
+_DISKCONFIG.fields_by_name["boot_disk_type"]._options = None +_DISKCONFIG.fields_by_name["boot_disk_size_gb"]._options = None +_LIFECYCLECONFIG.fields_by_name["idle_delete_ttl"]._options = None +_LIFECYCLECONFIG.fields_by_name["idle_start_time"]._options = None +_KERBEROSCONFIG.fields_by_name["enable_kerberos"]._options = None +_KERBEROSCONFIG.fields_by_name["root_principal_password_uri"]._options = None +_KERBEROSCONFIG.fields_by_name["kms_key_uri"]._options = None +_KERBEROSCONFIG.fields_by_name["keystore_uri"]._options = None +_KERBEROSCONFIG.fields_by_name["truststore_uri"]._options = None +_KERBEROSCONFIG.fields_by_name["keystore_password_uri"]._options = None +_KERBEROSCONFIG.fields_by_name["key_password_uri"]._options = None +_KERBEROSCONFIG.fields_by_name["truststore_password_uri"]._options = None +_KERBEROSCONFIG.fields_by_name["cross_realm_trust_realm"]._options = None +_KERBEROSCONFIG.fields_by_name["cross_realm_trust_kdc"]._options = None +_KERBEROSCONFIG.fields_by_name["cross_realm_trust_admin_server"]._options = None +_KERBEROSCONFIG.fields_by_name["cross_realm_trust_shared_password_uri"]._options = None +_KERBEROSCONFIG.fields_by_name["kdc_db_key_uri"]._options = None +_KERBEROSCONFIG.fields_by_name["tgt_lifetime_hours"]._options = None +_KERBEROSCONFIG.fields_by_name["realm"]._options = None +_NODEINITIALIZATIONACTION.fields_by_name["executable_file"]._options = None +_NODEINITIALIZATIONACTION.fields_by_name["execution_timeout"]._options = None +_CLUSTERSTATUS.fields_by_name["state"]._options = None +_CLUSTERSTATUS.fields_by_name["detail"]._options = None +_CLUSTERSTATUS.fields_by_name["state_start_time"]._options = None +_CLUSTERSTATUS.fields_by_name["substate"]._options = None _SOFTWARECONFIG_PROPERTIESENTRY._options = None +_SOFTWARECONFIG.fields_by_name["image_version"]._options = None +_SOFTWARECONFIG.fields_by_name["properties"]._options = None _CLUSTERMETRICS_HDFSMETRICSENTRY._options = None _CLUSTERMETRICS_YARNMETRICSENTRY._options = None 
+_CREATECLUSTERREQUEST.fields_by_name["project_id"]._options = None +_CREATECLUSTERREQUEST.fields_by_name["region"]._options = None +_CREATECLUSTERREQUEST.fields_by_name["cluster"]._options = None +_CREATECLUSTERREQUEST.fields_by_name["request_id"]._options = None +_UPDATECLUSTERREQUEST.fields_by_name["project_id"]._options = None +_UPDATECLUSTERREQUEST.fields_by_name["region"]._options = None +_UPDATECLUSTERREQUEST.fields_by_name["cluster_name"]._options = None +_UPDATECLUSTERREQUEST.fields_by_name["cluster"]._options = None +_UPDATECLUSTERREQUEST.fields_by_name["graceful_decommission_timeout"]._options = None +_UPDATECLUSTERREQUEST.fields_by_name["update_mask"]._options = None +_UPDATECLUSTERREQUEST.fields_by_name["request_id"]._options = None +_DELETECLUSTERREQUEST.fields_by_name["project_id"]._options = None +_DELETECLUSTERREQUEST.fields_by_name["region"]._options = None +_DELETECLUSTERREQUEST.fields_by_name["cluster_name"]._options = None +_DELETECLUSTERREQUEST.fields_by_name["cluster_uuid"]._options = None +_DELETECLUSTERREQUEST.fields_by_name["request_id"]._options = None +_GETCLUSTERREQUEST.fields_by_name["project_id"]._options = None +_GETCLUSTERREQUEST.fields_by_name["region"]._options = None +_GETCLUSTERREQUEST.fields_by_name["cluster_name"]._options = None +_LISTCLUSTERSREQUEST.fields_by_name["project_id"]._options = None +_LISTCLUSTERSREQUEST.fields_by_name["region"]._options = None +_LISTCLUSTERSREQUEST.fields_by_name["filter"]._options = None +_LISTCLUSTERSREQUEST.fields_by_name["page_size"]._options = None +_LISTCLUSTERSREQUEST.fields_by_name["page_token"]._options = None +_LISTCLUSTERSRESPONSE.fields_by_name["clusters"]._options = None +_LISTCLUSTERSRESPONSE.fields_by_name["next_page_token"]._options = None +_DIAGNOSECLUSTERREQUEST.fields_by_name["project_id"]._options = None +_DIAGNOSECLUSTERREQUEST.fields_by_name["region"]._options = None +_DIAGNOSECLUSTERREQUEST.fields_by_name["cluster_name"]._options = None 
+_DIAGNOSECLUSTERRESULTS.fields_by_name["output_uri"]._options = None +_RESERVATIONAFFINITY.fields_by_name["consume_reservation_type"]._options = None +_RESERVATIONAFFINITY.fields_by_name["key"]._options = None +_RESERVATIONAFFINITY.fields_by_name["values"]._options = None _CLUSTERCONTROLLER = _descriptor.ServiceDescriptor( name="ClusterController", full_name="google.cloud.dataproc.v1beta2.ClusterController", file=DESCRIPTOR, index=0, - serialized_options=None, - serialized_start=5954, - serialized_end=7098, + serialized_options=_b( + "\312A\027dataproc.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" + ), + serialized_start=6587, + serialized_end=8354, methods=[ _descriptor.MethodDescriptor( name="CreateCluster", @@ -4209,7 +4361,7 @@ input_type=_CREATECLUSTERREQUEST, output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, serialized_options=_b( - '\202\323\344\223\002C"8/v1beta2/projects/{project_id}/regions/{region}/clusters:\007cluster' + '\202\323\344\223\002C"8/v1beta2/projects/{project_id}/regions/{region}/clusters:\007cluster\332A\033project_id, region, cluster\312AA\n\007Cluster\0226google.cloud.dataproc.v1beta2.ClusterOperationMetadata' ), ), _descriptor.MethodDescriptor( @@ -4220,7 +4372,7 @@ input_type=_UPDATECLUSTERREQUEST, output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, serialized_options=_b( - "\202\323\344\223\002R2G/v1beta2/projects/{project_id}/regions/{region}/clusters/{cluster_name}:\007cluster" + "\202\323\344\223\002R2G/v1beta2/projects/{project_id}/regions/{region}/clusters/{cluster_name}:\007cluster\332A6project_id, region, cluster_name, cluster, update_mask\312AA\n\007Cluster\0226google.cloud.dataproc.v1beta2.ClusterOperationMetadata" ), ), _descriptor.MethodDescriptor( @@ -4231,7 +4383,7 @@ input_type=_DELETECLUSTERREQUEST, output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, serialized_options=_b( - 
"\202\323\344\223\002I*G/v1beta2/projects/{project_id}/regions/{region}/clusters/{cluster_name}" + "\202\323\344\223\002I*G/v1beta2/projects/{project_id}/regions/{region}/clusters/{cluster_name}\332A project_id, region, cluster_name\312AO\n\025google.protobuf.Empty\0226google.cloud.dataproc.v1beta2.ClusterOperationMetadata" ), ), _descriptor.MethodDescriptor( @@ -4242,7 +4394,7 @@ input_type=_GETCLUSTERREQUEST, output_type=_CLUSTER, serialized_options=_b( - "\202\323\344\223\002I\022G/v1beta2/projects/{project_id}/regions/{region}/clusters/{cluster_name}" + "\202\323\344\223\002I\022G/v1beta2/projects/{project_id}/regions/{region}/clusters/{cluster_name}\332A project_id, region, cluster_name" ), ), _descriptor.MethodDescriptor( @@ -4253,7 +4405,7 @@ input_type=_LISTCLUSTERSREQUEST, output_type=_LISTCLUSTERSRESPONSE, serialized_options=_b( - "\202\323\344\223\002:\0228/v1beta2/projects/{project_id}/regions/{region}/clusters" + "\202\323\344\223\002:\0228/v1beta2/projects/{project_id}/regions/{region}/clusters\332A\022project_id, region\332A\032project_id, region, filter" ), ), _descriptor.MethodDescriptor( @@ -4264,7 +4416,7 @@ input_type=_DIAGNOSECLUSTERREQUEST, output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, serialized_options=_b( - '\202\323\344\223\002U"P/v1beta2/projects/{project_id}/regions/{region}/clusters/{cluster_name}:diagnose:\001*' + '\202\323\344\223\002U"P/v1beta2/projects/{project_id}/regions/{region}/clusters/{cluster_name}:diagnose:\001*\332A project_id, region, cluster_name\312AO\n\025google.protobuf.Empty\0226google.cloud.dataproc.v1beta2.ClusterOperationMetadata' ), ), ], diff --git a/dataproc/google/cloud/dataproc_v1beta2/proto/clusters_pb2_grpc.py b/dataproc/google/cloud/dataproc_v1beta2/proto/clusters_pb2_grpc.py index aa8da2e86652..de9821404290 100644 --- a/dataproc/google/cloud/dataproc_v1beta2/proto/clusters_pb2_grpc.py +++ b/dataproc/google/cloud/dataproc_v1beta2/proto/clusters_pb2_grpc.py @@ -58,21 +58,27 @@ class 
ClusterControllerServicer(object): """ def CreateCluster(self, request, context): - """Creates a cluster in a project. + """Creates a cluster in a project. The returned + [Operation.metadata][google.longrunning.Operation.metadata] will be + [ClusterOperationMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata). """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def UpdateCluster(self, request, context): - """Updates a cluster in a project. + """Updates a cluster in a project. The returned + [Operation.metadata][google.longrunning.Operation.metadata] will be + [ClusterOperationMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata). """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def DeleteCluster(self, request, context): - """Deletes a cluster in a project. + """Deletes a cluster in a project. The returned + [Operation.metadata][google.longrunning.Operation.metadata] will be + [ClusterOperationMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata). """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") @@ -93,9 +99,13 @@ def ListClusters(self, request, context): raise NotImplementedError("Method not implemented!") def DiagnoseCluster(self, request, context): - """Gets cluster diagnostic information. - After the operation completes, the Operation.response field - contains `DiagnoseClusterOutputLocation`. + """Gets cluster diagnostic information. The returned + [Operation.metadata][google.longrunning.Operation.metadata] will be + [ClusterOperationMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata). 
+ After the operation completes, + [Operation.response][google.longrunning.Operation.response] + contains + [Empty](google.protobuf.Empty). """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") diff --git a/dataproc/google/cloud/dataproc_v1beta2/proto/jobs.proto b/dataproc/google/cloud/dataproc_v1beta2/proto/jobs.proto index 4d888dafc011..c1e643c92fd1 100644 --- a/dataproc/google/cloud/dataproc_v1beta2/proto/jobs.proto +++ b/dataproc/google/cloud/dataproc_v1beta2/proto/jobs.proto @@ -18,6 +18,8 @@ syntax = "proto3"; package google.cloud.dataproc.v1beta2; import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; import "google/protobuf/empty.proto"; import "google/protobuf/field_mask.proto"; import "google/protobuf/timestamp.proto"; @@ -29,12 +31,16 @@ option java_package = "com.google.cloud.dataproc.v1beta2"; // The JobController provides methods to manage jobs. service JobController { + option (google.api.default_host) = "dataproc.googleapis.com"; + option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; + // Submits a job to a cluster. rpc SubmitJob(SubmitJobRequest) returns (Job) { option (google.api.http) = { post: "/v1beta2/projects/{project_id}/regions/{region}/jobs:submit" body: "*" }; + option (google.api.method_signature) = "project_id, region, job"; } // Gets the resource representation for a job in a project. @@ -42,6 +48,7 @@ service JobController { option (google.api.http) = { get: "/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}" }; + option (google.api.method_signature) = "project_id, region, job_id"; } // Lists regions/{region}/jobs in a project. 
@@ -49,6 +56,8 @@ service JobController { option (google.api.http) = { get: "/v1beta2/projects/{project_id}/regions/{region}/jobs" }; + option (google.api.method_signature) = "project_id, region"; + option (google.api.method_signature) = "project_id, region, filter"; } // Updates a job in a project. @@ -69,6 +78,7 @@ service JobController { post: "/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}:cancel" body: "*" }; + option (google.api.method_signature) = "project_id, region, job_id"; } // Deletes the job from the project. If the job is active, the delete fails, @@ -77,6 +87,7 @@ service JobController { option (google.api.http) = { delete: "/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}" }; + option (google.api.method_signature) = "project_id, region, job_id"; } } @@ -176,12 +187,12 @@ message HadoopJob { // A Cloud Dataproc job for running [Apache Spark](http://spark.apache.org/) // applications on YARN. +// The specification of the main method to call to drive the job. +// Specify either the jar file that contains the main class or the main class +// name. To pass both a main jar and a main class in that jar, add the jar to +// `CommonJob.jar_file_uris`, and then specify the main class name in +// `main_class`. message SparkJob { - // Required. The specification of the main method to call to drive the job. - // Specify either the jar file that contains the main class or the main class - // name. To pass both a main jar and a main class in that jar, add the jar to - // `CommonJob.jar_file_uris`, and then specify the main class name in - // `main_class`. oneof driver { // The HCFS URI of the jar file that contains the main class. string main_jar_file_uri = 1; @@ -226,7 +237,7 @@ message SparkJob { message PySparkJob { // Required. The HCFS URI of the main Python file to use as the driver. Must // be a .py file. - string main_python_file_uri = 1; + string main_python_file_uri = 1 [(google.api.field_behavior) = REQUIRED]; // Optional. 
The arguments to pass to the driver. Do not include arguments, // such as `--conf`, that can be set as job properties, since a collision may @@ -275,7 +286,7 @@ message QueryList { // ] // } // } - repeated string queries = 1; + repeated string queries = 1 [(google.api.field_behavior) = REQUIRED]; } // A Cloud Dataproc job for running [Apache Hive](https://hive.apache.org/) @@ -383,7 +394,7 @@ message PigJob { message SparkRJob { // Required. The HCFS URI of the main R file to use as the driver. // Must be a .R file. - string main_r_file_uri = 1; + string main_r_file_uri = 1 [(google.api.field_behavior) = REQUIRED]; // Optional. The arguments to pass to the driver. Do not include arguments, // such as `--conf`, that can be set as job properties, since a collision may @@ -412,7 +423,7 @@ message SparkRJob { // Cloud Dataproc job config. message JobPlacement { // Required. The name of the cluster where the job will be submitted. - string cluster_name = 1; + string cluster_name = 1 [(google.api.field_behavior) = REQUIRED]; // Output only. A cluster UUID generated by the Cloud Dataproc service when // the job is submitted. @@ -503,7 +514,7 @@ message JobStatus { message JobReference { // Required. The ID of the Google Cloud Platform project that the job // belongs to. - string project_id = 1; + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; // Optional. The job ID, which must be unique within the project. // @@ -551,20 +562,20 @@ message YarnApplication { KILLED = 8; } - // Required. The application name. - string name = 1; + // Output only. The application name. + string name = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; - // Required. The application state. - State state = 2; + // Output only. The application state. + State state = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; - // Required. The numerical progress of the application, from 1 to 100. - float progress = 3; + // Output only. The numerical progress of the application, from 1 to 100. 
+ float progress = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; - // Optional. The HTTP URL of the ApplicationMaster, HistoryServer, or + // Optional. Output only. The HTTP URL of the ApplicationMaster, HistoryServer, or // TimelineServer that provides application-specific information. The URL uses // the internal hostname, and requires a proxy server for resolution and, // possibly, access. - string tracking_url = 4; + string tracking_url = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; } // A Cloud Dataproc job resource. @@ -577,7 +588,7 @@ message Job { // Required. Job information, including how, when, and where to // run the job. - JobPlacement placement = 2; + JobPlacement placement = 2 [(google.api.field_behavior) = REQUIRED]; // Required. The application/framework-specific portion of the job. oneof type_job { @@ -665,13 +676,13 @@ message JobScheduling { message SubmitJobRequest { // Required. The ID of the Google Cloud Platform project that the job // belongs to. - string project_id = 1; + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The Cloud Dataproc region in which to handle the request. - string region = 3; + string region = 3 [(google.api.field_behavior) = REQUIRED]; // Required. The job resource. - Job job = 2; + Job job = 2 [(google.api.field_behavior) = REQUIRED]; // Optional. A unique id used to identify the request. If the server // receives two [SubmitJobRequest][google.cloud.dataproc.v1beta2.SubmitJobRequest] requests with the same @@ -691,13 +702,13 @@ message SubmitJobRequest { message GetJobRequest { // Required. The ID of the Google Cloud Platform project that the job // belongs to. - string project_id = 1; + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The Cloud Dataproc region in which to handle the request. - string region = 3; + string region = 3 [(google.api.field_behavior) = REQUIRED]; // Required. The job ID. 
- string job_id = 2; + string job_id = 2 [(google.api.field_behavior) = REQUIRED]; } // A request to list jobs in a project. @@ -717,10 +728,10 @@ message ListJobsRequest { // Required. The ID of the Google Cloud Platform project that the job // belongs to. - string project_id = 1; + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The Cloud Dataproc region in which to handle the request. - string region = 6; + string region = 6 [(google.api.field_behavior) = REQUIRED]; // Optional. The number of results to return in each response. int32 page_size = 2; @@ -760,16 +771,16 @@ message ListJobsRequest { message UpdateJobRequest { // Required. The ID of the Google Cloud Platform project that the job // belongs to. - string project_id = 1; + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The Cloud Dataproc region in which to handle the request. - string region = 2; + string region = 2 [(google.api.field_behavior) = REQUIRED]; // Required. The job ID. - string job_id = 3; + string job_id = 3 [(google.api.field_behavior) = REQUIRED]; // Required. The changes to the job. - Job job = 4; + Job job = 4 [(google.api.field_behavior) = REQUIRED]; // Required. Specifies the path, relative to Job, of // the field to update. For example, to update the labels of a Job the @@ -777,7 +788,7 @@ message UpdateJobRequest { // labels, and the `PATCH` request body would specify the new // value. Note: Currently, labels is the only // field that can be updated. - google.protobuf.FieldMask update_mask = 5; + google.protobuf.FieldMask update_mask = 5 [(google.api.field_behavior) = REQUIRED]; } // A list of jobs in a project. @@ -795,24 +806,24 @@ message ListJobsResponse { message CancelJobRequest { // Required. The ID of the Google Cloud Platform project that the job // belongs to. - string project_id = 1; + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; // Required. 
The Cloud Dataproc region in which to handle the request. - string region = 3; + string region = 3 [(google.api.field_behavior) = REQUIRED]; // Required. The job ID. - string job_id = 2; + string job_id = 2 [(google.api.field_behavior) = REQUIRED]; } // A request to delete a job. message DeleteJobRequest { // Required. The ID of the Google Cloud Platform project that the job // belongs to. - string project_id = 1; + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The Cloud Dataproc region in which to handle the request. - string region = 3; + string region = 3 [(google.api.field_behavior) = REQUIRED]; // Required. The job ID. - string job_id = 2; + string job_id = 2 [(google.api.field_behavior) = REQUIRED]; } diff --git a/dataproc/google/cloud/dataproc_v1beta2/proto/jobs_pb2.py b/dataproc/google/cloud/dataproc_v1beta2/proto/jobs_pb2.py index 6379fd2c8bbc..b25037c0cddf 100644 --- a/dataproc/google/cloud/dataproc_v1beta2/proto/jobs_pb2.py +++ b/dataproc/google/cloud/dataproc_v1beta2/proto/jobs_pb2.py @@ -16,6 +16,8 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 @@ -29,10 +31,12 @@ "\n!com.google.cloud.dataproc.v1beta2B\tJobsProtoP\001ZEgoogle.golang.org/genproto/googleapis/cloud/dataproc/v1beta2;dataproc" ), serialized_pb=_b( - '\n.google/cloud/dataproc_v1beta2/proto/jobs.proto\x12\x1dgoogle.cloud.dataproc.v1beta2\x1a\x1cgoogle/api/annotations.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a 
google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xcb\x02\n\rLoggingConfig\x12\\\n\x11\x64river_log_levels\x18\x02 \x03(\x0b\x32\x41.google.cloud.dataproc.v1beta2.LoggingConfig.DriverLogLevelsEntry\x1aj\n\x14\x44riverLogLevelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x41\n\x05value\x18\x02 \x01(\x0e\x32\x32.google.cloud.dataproc.v1beta2.LoggingConfig.Level:\x02\x38\x01"p\n\x05Level\x12\x15\n\x11LEVEL_UNSPECIFIED\x10\x00\x12\x07\n\x03\x41LL\x10\x01\x12\t\n\x05TRACE\x10\x02\x12\t\n\x05\x44\x45\x42UG\x10\x03\x12\x08\n\x04INFO\x10\x04\x12\x08\n\x04WARN\x10\x05\x12\t\n\x05\x45RROR\x10\x06\x12\t\n\x05\x46\x41TAL\x10\x07\x12\x07\n\x03OFF\x10\x08"\xdd\x02\n\tHadoopJob\x12\x1b\n\x11main_jar_file_uri\x18\x01 \x01(\tH\x00\x12\x14\n\nmain_class\x18\x02 \x01(\tH\x00\x12\x0c\n\x04\x61rgs\x18\x03 \x03(\t\x12\x15\n\rjar_file_uris\x18\x04 \x03(\t\x12\x11\n\tfile_uris\x18\x05 \x03(\t\x12\x14\n\x0c\x61rchive_uris\x18\x06 \x03(\t\x12L\n\nproperties\x18\x07 \x03(\x0b\x32\x38.google.cloud.dataproc.v1beta2.HadoopJob.PropertiesEntry\x12\x44\n\x0elogging_config\x18\x08 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.LoggingConfig\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x08\n\x06\x64river"\xdb\x02\n\x08SparkJob\x12\x1b\n\x11main_jar_file_uri\x18\x01 \x01(\tH\x00\x12\x14\n\nmain_class\x18\x02 \x01(\tH\x00\x12\x0c\n\x04\x61rgs\x18\x03 \x03(\t\x12\x15\n\rjar_file_uris\x18\x04 \x03(\t\x12\x11\n\tfile_uris\x18\x05 \x03(\t\x12\x14\n\x0c\x61rchive_uris\x18\x06 \x03(\t\x12K\n\nproperties\x18\x07 \x03(\x0b\x32\x37.google.cloud.dataproc.v1beta2.SparkJob.PropertiesEntry\x12\x44\n\x0elogging_config\x18\x08 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.LoggingConfig\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x08\n\x06\x64river"\xda\x02\n\nPySparkJob\x12\x1c\n\x14main_python_file_uri\x18\x01 \x01(\t\x12\x0c\n\x04\x61rgs\x18\x02 
\x03(\t\x12\x18\n\x10python_file_uris\x18\x03 \x03(\t\x12\x15\n\rjar_file_uris\x18\x04 \x03(\t\x12\x11\n\tfile_uris\x18\x05 \x03(\t\x12\x14\n\x0c\x61rchive_uris\x18\x06 \x03(\t\x12M\n\nproperties\x18\x07 \x03(\x0b\x32\x39.google.cloud.dataproc.v1beta2.PySparkJob.PropertiesEntry\x12\x44\n\x0elogging_config\x18\x08 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.LoggingConfig\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x1c\n\tQueryList\x12\x0f\n\x07queries\x18\x01 \x03(\t"\xb0\x03\n\x07HiveJob\x12\x18\n\x0equery_file_uri\x18\x01 \x01(\tH\x00\x12>\n\nquery_list\x18\x02 \x01(\x0b\x32(.google.cloud.dataproc.v1beta2.QueryListH\x00\x12\x1b\n\x13\x63ontinue_on_failure\x18\x03 \x01(\x08\x12U\n\x10script_variables\x18\x04 \x03(\x0b\x32;.google.cloud.dataproc.v1beta2.HiveJob.ScriptVariablesEntry\x12J\n\nproperties\x18\x05 \x03(\x0b\x32\x36.google.cloud.dataproc.v1beta2.HiveJob.PropertiesEntry\x12\x15\n\rjar_file_uris\x18\x06 \x03(\t\x1a\x36\n\x14ScriptVariablesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07queries"\xe5\x03\n\x0bSparkSqlJob\x12\x18\n\x0equery_file_uri\x18\x01 \x01(\tH\x00\x12>\n\nquery_list\x18\x02 \x01(\x0b\x32(.google.cloud.dataproc.v1beta2.QueryListH\x00\x12Y\n\x10script_variables\x18\x03 \x03(\x0b\x32?.google.cloud.dataproc.v1beta2.SparkSqlJob.ScriptVariablesEntry\x12N\n\nproperties\x18\x04 \x03(\x0b\x32:.google.cloud.dataproc.v1beta2.SparkSqlJob.PropertiesEntry\x12\x15\n\rjar_file_uris\x18\x38 \x03(\t\x12\x44\n\x0elogging_config\x18\x06 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.LoggingConfig\x1a\x36\n\x14ScriptVariablesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01\x42\t\n\x07queries"\xf3\x03\n\x06PigJob\x12\x18\n\x0equery_file_uri\x18\x01 \x01(\tH\x00\x12>\n\nquery_list\x18\x02 \x01(\x0b\x32(.google.cloud.dataproc.v1beta2.QueryListH\x00\x12\x1b\n\x13\x63ontinue_on_failure\x18\x03 \x01(\x08\x12T\n\x10script_variables\x18\x04 \x03(\x0b\x32:.google.cloud.dataproc.v1beta2.PigJob.ScriptVariablesEntry\x12I\n\nproperties\x18\x05 \x03(\x0b\x32\x35.google.cloud.dataproc.v1beta2.PigJob.PropertiesEntry\x12\x15\n\rjar_file_uris\x18\x06 \x03(\t\x12\x44\n\x0elogging_config\x18\x07 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.LoggingConfig\x1a\x36\n\x14ScriptVariablesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07queries"\xa2\x02\n\tSparkRJob\x12\x17\n\x0fmain_r_file_uri\x18\x01 \x01(\t\x12\x0c\n\x04\x61rgs\x18\x02 \x03(\t\x12\x11\n\tfile_uris\x18\x03 \x03(\t\x12\x14\n\x0c\x61rchive_uris\x18\x04 \x03(\t\x12L\n\nproperties\x18\x05 \x03(\x0b\x32\x38.google.cloud.dataproc.v1beta2.SparkRJob.PropertiesEntry\x12\x44\n\x0elogging_config\x18\x06 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.LoggingConfig\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01":\n\x0cJobPlacement\x12\x14\n\x0c\x63luster_name\x18\x01 \x01(\t\x12\x14\n\x0c\x63luster_uuid\x18\x02 \x01(\t"\xcc\x03\n\tJobStatus\x12=\n\x05state\x18\x01 \x01(\x0e\x32..google.cloud.dataproc.v1beta2.JobStatus.State\x12\x0f\n\x07\x64\x65tails\x18\x02 \x01(\t\x12\x34\n\x10state_start_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x43\n\x08substate\x18\x07 
\x01(\x0e\x32\x31.google.cloud.dataproc.v1beta2.JobStatus.Substate"\xa9\x01\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07PENDING\x10\x01\x12\x0e\n\nSETUP_DONE\x10\x08\x12\x0b\n\x07RUNNING\x10\x02\x12\x12\n\x0e\x43\x41NCEL_PENDING\x10\x03\x12\x12\n\x0e\x43\x41NCEL_STARTED\x10\x07\x12\r\n\tCANCELLED\x10\x04\x12\x08\n\x04\x44ONE\x10\x05\x12\t\n\x05\x45RROR\x10\x06\x12\x13\n\x0f\x41TTEMPT_FAILURE\x10\t"H\n\x08Substate\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\r\n\tSUBMITTED\x10\x01\x12\n\n\x06QUEUED\x10\x02\x12\x10\n\x0cSTALE_STATUS\x10\x03"2\n\x0cJobReference\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06job_id\x18\x02 \x01(\t"\x96\x02\n\x0fYarnApplication\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x43\n\x05state\x18\x02 \x01(\x0e\x32\x34.google.cloud.dataproc.v1beta2.YarnApplication.State\x12\x10\n\x08progress\x18\x03 \x01(\x02\x12\x14\n\x0ctracking_url\x18\x04 \x01(\t"\x87\x01\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x07\n\x03NEW\x10\x01\x12\x0e\n\nNEW_SAVING\x10\x02\x12\r\n\tSUBMITTED\x10\x03\x12\x0c\n\x08\x41\x43\x43\x45PTED\x10\x04\x12\x0b\n\x07RUNNING\x10\x05\x12\x0c\n\x08\x46INISHED\x10\x06\x12\n\n\x06\x46\x41ILED\x10\x07\x12\n\n\x06KILLED\x10\x08"\xb3\x08\n\x03Job\x12>\n\treference\x18\x01 \x01(\x0b\x32+.google.cloud.dataproc.v1beta2.JobReference\x12>\n\tplacement\x18\x02 \x01(\x0b\x32+.google.cloud.dataproc.v1beta2.JobPlacement\x12>\n\nhadoop_job\x18\x03 \x01(\x0b\x32(.google.cloud.dataproc.v1beta2.HadoopJobH\x00\x12<\n\tspark_job\x18\x04 \x01(\x0b\x32\'.google.cloud.dataproc.v1beta2.SparkJobH\x00\x12@\n\x0bpyspark_job\x18\x05 \x01(\x0b\x32).google.cloud.dataproc.v1beta2.PySparkJobH\x00\x12:\n\x08hive_job\x18\x06 \x01(\x0b\x32&.google.cloud.dataproc.v1beta2.HiveJobH\x00\x12\x38\n\x07pig_job\x18\x07 \x01(\x0b\x32%.google.cloud.dataproc.v1beta2.PigJobH\x00\x12?\n\x0bspark_r_job\x18\x15 \x01(\x0b\x32(.google.cloud.dataproc.v1beta2.SparkRJobH\x00\x12\x43\n\rspark_sql_job\x18\x0c 
\x01(\x0b\x32*.google.cloud.dataproc.v1beta2.SparkSqlJobH\x00\x12\x38\n\x06status\x18\x08 \x01(\x0b\x32(.google.cloud.dataproc.v1beta2.JobStatus\x12@\n\x0estatus_history\x18\r \x03(\x0b\x32(.google.cloud.dataproc.v1beta2.JobStatus\x12I\n\x11yarn_applications\x18\t \x03(\x0b\x32..google.cloud.dataproc.v1beta2.YarnApplication\x12\x14\n\x0csubmitted_by\x18\n \x01(\t\x12"\n\x1a\x64river_output_resource_uri\x18\x11 \x01(\t\x12 \n\x18\x64river_control_files_uri\x18\x0f \x01(\t\x12>\n\x06labels\x18\x12 \x03(\x0b\x32..google.cloud.dataproc.v1beta2.Job.LabelsEntry\x12@\n\nscheduling\x18\x14 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.JobScheduling\x12\x10\n\x08job_uuid\x18\x16 \x01(\t\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\n\n\x08type_job".\n\rJobScheduling\x12\x1d\n\x15max_failures_per_hour\x18\x01 \x01(\x05"{\n\x10SubmitJobRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06region\x18\x03 \x01(\t\x12/\n\x03job\x18\x02 \x01(\x0b\x32".google.cloud.dataproc.v1beta2.Job\x12\x12\n\nrequest_id\x18\x04 \x01(\t"C\n\rGetJobRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06region\x18\x03 \x01(\t\x12\x0e\n\x06job_id\x18\x02 \x01(\t"\x95\x02\n\x0fListJobsRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06region\x18\x06 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\x12\x14\n\x0c\x63luster_name\x18\x04 \x01(\t\x12Y\n\x11job_state_matcher\x18\x05 \x01(\x0e\x32>.google.cloud.dataproc.v1beta2.ListJobsRequest.JobStateMatcher\x12\x0e\n\x06\x66ilter\x18\x07 \x01(\t"6\n\x0fJobStateMatcher\x12\x07\n\x03\x41LL\x10\x00\x12\n\n\x06\x41\x43TIVE\x10\x01\x12\x0e\n\nNON_ACTIVE\x10\x02"\xa8\x01\n\x10UpdateJobRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06region\x18\x02 \x01(\t\x12\x0e\n\x06job_id\x18\x03 \x01(\t\x12/\n\x03job\x18\x04 \x01(\x0b\x32".google.cloud.dataproc.v1beta2.Job\x12/\n\x0bupdate_mask\x18\x05 
\x01(\x0b\x32\x1a.google.protobuf.FieldMask"]\n\x10ListJobsResponse\x12\x30\n\x04jobs\x18\x01 \x03(\x0b\x32".google.cloud.dataproc.v1beta2.Job\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"F\n\x10\x43\x61ncelJobRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06region\x18\x03 \x01(\t\x12\x0e\n\x06job_id\x18\x02 \x01(\t"F\n\x10\x44\x65leteJobRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06region\x18\x03 \x01(\t\x12\x0e\n\x06job_id\x18\x02 \x01(\t2\x8b\x08\n\rJobController\x12\xa8\x01\n\tSubmitJob\x12/.google.cloud.dataproc.v1beta2.SubmitJobRequest\x1a".google.cloud.dataproc.v1beta2.Job"F\x82\xd3\xe4\x93\x02@";/v1beta2/projects/{project_id}/regions/{region}/jobs:submit:\x01*\x12\xa1\x01\n\x06GetJob\x12,.google.cloud.dataproc.v1beta2.GetJobRequest\x1a".google.cloud.dataproc.v1beta2.Job"E\x82\xd3\xe4\x93\x02?\x12=/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}\x12\xa9\x01\n\x08ListJobs\x12..google.cloud.dataproc.v1beta2.ListJobsRequest\x1a/.google.cloud.dataproc.v1beta2.ListJobsResponse"<\x82\xd3\xe4\x93\x02\x36\x12\x34/v1beta2/projects/{project_id}/regions/{region}/jobs\x12\xac\x01\n\tUpdateJob\x12/.google.cloud.dataproc.v1beta2.UpdateJobRequest\x1a".google.cloud.dataproc.v1beta2.Job"J\x82\xd3\xe4\x93\x02\x44\x32=/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}:\x03job\x12\xb1\x01\n\tCancelJob\x12/.google.cloud.dataproc.v1beta2.CancelJobRequest\x1a".google.cloud.dataproc.v1beta2.Job"O\x82\xd3\xe4\x93\x02I"D/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}:cancel:\x01*\x12\x9b\x01\n\tDeleteJob\x12/.google.cloud.dataproc.v1beta2.DeleteJobRequest\x1a\x16.google.protobuf.Empty"E\x82\xd3\xe4\x93\x02?*=/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}Bw\n!com.google.cloud.dataproc.v1beta2B\tJobsProtoP\x01ZEgoogle.golang.org/genproto/googleapis/cloud/dataproc/v1beta2;dataprocb\x06proto3' + 
'\n.google/cloud/dataproc_v1beta2/proto/jobs.proto\x12\x1dgoogle.cloud.dataproc.v1beta2\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xcb\x02\n\rLoggingConfig\x12\\\n\x11\x64river_log_levels\x18\x02 \x03(\x0b\x32\x41.google.cloud.dataproc.v1beta2.LoggingConfig.DriverLogLevelsEntry\x1aj\n\x14\x44riverLogLevelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x41\n\x05value\x18\x02 \x01(\x0e\x32\x32.google.cloud.dataproc.v1beta2.LoggingConfig.Level:\x02\x38\x01"p\n\x05Level\x12\x15\n\x11LEVEL_UNSPECIFIED\x10\x00\x12\x07\n\x03\x41LL\x10\x01\x12\t\n\x05TRACE\x10\x02\x12\t\n\x05\x44\x45\x42UG\x10\x03\x12\x08\n\x04INFO\x10\x04\x12\x08\n\x04WARN\x10\x05\x12\t\n\x05\x45RROR\x10\x06\x12\t\n\x05\x46\x41TAL\x10\x07\x12\x07\n\x03OFF\x10\x08"\xdd\x02\n\tHadoopJob\x12\x1b\n\x11main_jar_file_uri\x18\x01 \x01(\tH\x00\x12\x14\n\nmain_class\x18\x02 \x01(\tH\x00\x12\x0c\n\x04\x61rgs\x18\x03 \x03(\t\x12\x15\n\rjar_file_uris\x18\x04 \x03(\t\x12\x11\n\tfile_uris\x18\x05 \x03(\t\x12\x14\n\x0c\x61rchive_uris\x18\x06 \x03(\t\x12L\n\nproperties\x18\x07 \x03(\x0b\x32\x38.google.cloud.dataproc.v1beta2.HadoopJob.PropertiesEntry\x12\x44\n\x0elogging_config\x18\x08 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.LoggingConfig\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x08\n\x06\x64river"\xdb\x02\n\x08SparkJob\x12\x1b\n\x11main_jar_file_uri\x18\x01 \x01(\tH\x00\x12\x14\n\nmain_class\x18\x02 \x01(\tH\x00\x12\x0c\n\x04\x61rgs\x18\x03 \x03(\t\x12\x15\n\rjar_file_uris\x18\x04 \x03(\t\x12\x11\n\tfile_uris\x18\x05 \x03(\t\x12\x14\n\x0c\x61rchive_uris\x18\x06 \x03(\t\x12K\n\nproperties\x18\x07 \x03(\x0b\x32\x37.google.cloud.dataproc.v1beta2.SparkJob.PropertiesEntry\x12\x44\n\x0elogging_config\x18\x08 
\x01(\x0b\x32,.google.cloud.dataproc.v1beta2.LoggingConfig\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x08\n\x06\x64river"\xdf\x02\n\nPySparkJob\x12!\n\x14main_python_file_uri\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x0c\n\x04\x61rgs\x18\x02 \x03(\t\x12\x18\n\x10python_file_uris\x18\x03 \x03(\t\x12\x15\n\rjar_file_uris\x18\x04 \x03(\t\x12\x11\n\tfile_uris\x18\x05 \x03(\t\x12\x14\n\x0c\x61rchive_uris\x18\x06 \x03(\t\x12M\n\nproperties\x18\x07 \x03(\x0b\x32\x39.google.cloud.dataproc.v1beta2.PySparkJob.PropertiesEntry\x12\x44\n\x0elogging_config\x18\x08 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.LoggingConfig\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"!\n\tQueryList\x12\x14\n\x07queries\x18\x01 \x03(\tB\x03\xe0\x41\x02"\xb0\x03\n\x07HiveJob\x12\x18\n\x0equery_file_uri\x18\x01 \x01(\tH\x00\x12>\n\nquery_list\x18\x02 \x01(\x0b\x32(.google.cloud.dataproc.v1beta2.QueryListH\x00\x12\x1b\n\x13\x63ontinue_on_failure\x18\x03 \x01(\x08\x12U\n\x10script_variables\x18\x04 \x03(\x0b\x32;.google.cloud.dataproc.v1beta2.HiveJob.ScriptVariablesEntry\x12J\n\nproperties\x18\x05 \x03(\x0b\x32\x36.google.cloud.dataproc.v1beta2.HiveJob.PropertiesEntry\x12\x15\n\rjar_file_uris\x18\x06 \x03(\t\x1a\x36\n\x14ScriptVariablesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07queries"\xe5\x03\n\x0bSparkSqlJob\x12\x18\n\x0equery_file_uri\x18\x01 \x01(\tH\x00\x12>\n\nquery_list\x18\x02 \x01(\x0b\x32(.google.cloud.dataproc.v1beta2.QueryListH\x00\x12Y\n\x10script_variables\x18\x03 \x03(\x0b\x32?.google.cloud.dataproc.v1beta2.SparkSqlJob.ScriptVariablesEntry\x12N\n\nproperties\x18\x04 \x03(\x0b\x32:.google.cloud.dataproc.v1beta2.SparkSqlJob.PropertiesEntry\x12\x15\n\rjar_file_uris\x18\x38 
\x03(\t\x12\x44\n\x0elogging_config\x18\x06 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.LoggingConfig\x1a\x36\n\x14ScriptVariablesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07queries"\xf3\x03\n\x06PigJob\x12\x18\n\x0equery_file_uri\x18\x01 \x01(\tH\x00\x12>\n\nquery_list\x18\x02 \x01(\x0b\x32(.google.cloud.dataproc.v1beta2.QueryListH\x00\x12\x1b\n\x13\x63ontinue_on_failure\x18\x03 \x01(\x08\x12T\n\x10script_variables\x18\x04 \x03(\x0b\x32:.google.cloud.dataproc.v1beta2.PigJob.ScriptVariablesEntry\x12I\n\nproperties\x18\x05 \x03(\x0b\x32\x35.google.cloud.dataproc.v1beta2.PigJob.PropertiesEntry\x12\x15\n\rjar_file_uris\x18\x06 \x03(\t\x12\x44\n\x0elogging_config\x18\x07 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.LoggingConfig\x1a\x36\n\x14ScriptVariablesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07queries"\xa7\x02\n\tSparkRJob\x12\x1c\n\x0fmain_r_file_uri\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x0c\n\x04\x61rgs\x18\x02 \x03(\t\x12\x11\n\tfile_uris\x18\x03 \x03(\t\x12\x14\n\x0c\x61rchive_uris\x18\x04 \x03(\t\x12L\n\nproperties\x18\x05 \x03(\x0b\x32\x38.google.cloud.dataproc.v1beta2.SparkRJob.PropertiesEntry\x12\x44\n\x0elogging_config\x18\x06 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.LoggingConfig\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"?\n\x0cJobPlacement\x12\x19\n\x0c\x63luster_name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x14\n\x0c\x63luster_uuid\x18\x02 \x01(\t"\xcc\x03\n\tJobStatus\x12=\n\x05state\x18\x01 \x01(\x0e\x32..google.cloud.dataproc.v1beta2.JobStatus.State\x12\x0f\n\x07\x64\x65tails\x18\x02 \x01(\t\x12\x34\n\x10state_start_time\x18\x06 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x43\n\x08substate\x18\x07 \x01(\x0e\x32\x31.google.cloud.dataproc.v1beta2.JobStatus.Substate"\xa9\x01\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07PENDING\x10\x01\x12\x0e\n\nSETUP_DONE\x10\x08\x12\x0b\n\x07RUNNING\x10\x02\x12\x12\n\x0e\x43\x41NCEL_PENDING\x10\x03\x12\x12\n\x0e\x43\x41NCEL_STARTED\x10\x07\x12\r\n\tCANCELLED\x10\x04\x12\x08\n\x04\x44ONE\x10\x05\x12\t\n\x05\x45RROR\x10\x06\x12\x13\n\x0f\x41TTEMPT_FAILURE\x10\t"H\n\x08Substate\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\r\n\tSUBMITTED\x10\x01\x12\n\n\x06QUEUED\x10\x02\x12\x10\n\x0cSTALE_STATUS\x10\x03"7\n\x0cJobReference\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x0e\n\x06job_id\x18\x02 \x01(\t"\xaa\x02\n\x0fYarnApplication\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12H\n\x05state\x18\x02 \x01(\x0e\x32\x34.google.cloud.dataproc.v1beta2.YarnApplication.StateB\x03\xe0\x41\x03\x12\x15\n\x08progress\x18\x03 \x01(\x02\x42\x03\xe0\x41\x03\x12\x19\n\x0ctracking_url\x18\x04 \x01(\tB\x03\xe0\x41\x03"\x87\x01\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x07\n\x03NEW\x10\x01\x12\x0e\n\nNEW_SAVING\x10\x02\x12\r\n\tSUBMITTED\x10\x03\x12\x0c\n\x08\x41\x43\x43\x45PTED\x10\x04\x12\x0b\n\x07RUNNING\x10\x05\x12\x0c\n\x08\x46INISHED\x10\x06\x12\n\n\x06\x46\x41ILED\x10\x07\x12\n\n\x06KILLED\x10\x08"\xb8\x08\n\x03Job\x12>\n\treference\x18\x01 \x01(\x0b\x32+.google.cloud.dataproc.v1beta2.JobReference\x12\x43\n\tplacement\x18\x02 \x01(\x0b\x32+.google.cloud.dataproc.v1beta2.JobPlacementB\x03\xe0\x41\x02\x12>\n\nhadoop_job\x18\x03 \x01(\x0b\x32(.google.cloud.dataproc.v1beta2.HadoopJobH\x00\x12<\n\tspark_job\x18\x04 \x01(\x0b\x32\'.google.cloud.dataproc.v1beta2.SparkJobH\x00\x12@\n\x0bpyspark_job\x18\x05 \x01(\x0b\x32).google.cloud.dataproc.v1beta2.PySparkJobH\x00\x12:\n\x08hive_job\x18\x06 \x01(\x0b\x32&.google.cloud.dataproc.v1beta2.HiveJobH\x00\x12\x38\n\x07pig_job\x18\x07 
\x01(\x0b\x32%.google.cloud.dataproc.v1beta2.PigJobH\x00\x12?\n\x0bspark_r_job\x18\x15 \x01(\x0b\x32(.google.cloud.dataproc.v1beta2.SparkRJobH\x00\x12\x43\n\rspark_sql_job\x18\x0c \x01(\x0b\x32*.google.cloud.dataproc.v1beta2.SparkSqlJobH\x00\x12\x38\n\x06status\x18\x08 \x01(\x0b\x32(.google.cloud.dataproc.v1beta2.JobStatus\x12@\n\x0estatus_history\x18\r \x03(\x0b\x32(.google.cloud.dataproc.v1beta2.JobStatus\x12I\n\x11yarn_applications\x18\t \x03(\x0b\x32..google.cloud.dataproc.v1beta2.YarnApplication\x12\x14\n\x0csubmitted_by\x18\n \x01(\t\x12"\n\x1a\x64river_output_resource_uri\x18\x11 \x01(\t\x12 \n\x18\x64river_control_files_uri\x18\x0f \x01(\t\x12>\n\x06labels\x18\x12 \x03(\x0b\x32..google.cloud.dataproc.v1beta2.Job.LabelsEntry\x12@\n\nscheduling\x18\x14 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.JobScheduling\x12\x10\n\x08job_uuid\x18\x16 \x01(\t\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\n\n\x08type_job".\n\rJobScheduling\x12\x1d\n\x15max_failures_per_hour\x18\x01 \x01(\x05"\x8a\x01\n\x10SubmitJobRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x34\n\x03job\x18\x02 \x01(\x0b\x32".google.cloud.dataproc.v1beta2.JobB\x03\xe0\x41\x02\x12\x12\n\nrequest_id\x18\x04 \x01(\t"R\n\rGetJobRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06job_id\x18\x02 \x01(\tB\x03\xe0\x41\x02"\x9f\x02\n\x0fListJobsRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x06 \x01(\tB\x03\xe0\x41\x02\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\x12\x14\n\x0c\x63luster_name\x18\x04 \x01(\t\x12Y\n\x11job_state_matcher\x18\x05 \x01(\x0e\x32>.google.cloud.dataproc.v1beta2.ListJobsRequest.JobStateMatcher\x12\x0e\n\x06\x66ilter\x18\x07 
\x01(\t"6\n\x0fJobStateMatcher\x12\x07\n\x03\x41LL\x10\x00\x12\n\n\x06\x41\x43TIVE\x10\x01\x12\x0e\n\nNON_ACTIVE\x10\x02"\xc1\x01\n\x10UpdateJobRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06job_id\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x34\n\x03job\x18\x04 \x01(\x0b\x32".google.cloud.dataproc.v1beta2.JobB\x03\xe0\x41\x02\x12\x34\n\x0bupdate_mask\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02"]\n\x10ListJobsResponse\x12\x30\n\x04jobs\x18\x01 \x03(\x0b\x32".google.cloud.dataproc.v1beta2.Job\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"U\n\x10\x43\x61ncelJobRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06job_id\x18\x02 \x01(\tB\x03\xe0\x41\x02"U\n\x10\x44\x65leteJobRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06job_id\x18\x02 \x01(\tB\x03\xe0\x41\x02\x32\xfb\t\n\rJobController\x12\xc2\x01\n\tSubmitJob\x12/.google.cloud.dataproc.v1beta2.SubmitJobRequest\x1a".google.cloud.dataproc.v1beta2.Job"`\x82\xd3\xe4\x93\x02@";/v1beta2/projects/{project_id}/regions/{region}/jobs:submit:\x01*\xda\x41\x17project_id, region, job\x12\xbe\x01\n\x06GetJob\x12,.google.cloud.dataproc.v1beta2.GetJobRequest\x1a".google.cloud.dataproc.v1beta2.Job"b\x82\xd3\xe4\x93\x02?\x12=/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}\xda\x41\x1aproject_id, region, job_id\x12\xdb\x01\n\x08ListJobs\x12..google.cloud.dataproc.v1beta2.ListJobsRequest\x1a/.google.cloud.dataproc.v1beta2.ListJobsResponse"n\x82\xd3\xe4\x93\x02\x36\x12\x34/v1beta2/projects/{project_id}/regions/{region}/jobs\xda\x41\x12project_id, region\xda\x41\x1aproject_id, region, 
filter\x12\xac\x01\n\tUpdateJob\x12/.google.cloud.dataproc.v1beta2.UpdateJobRequest\x1a".google.cloud.dataproc.v1beta2.Job"J\x82\xd3\xe4\x93\x02\x44\x32=/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}:\x03job\x12\xce\x01\n\tCancelJob\x12/.google.cloud.dataproc.v1beta2.CancelJobRequest\x1a".google.cloud.dataproc.v1beta2.Job"l\x82\xd3\xe4\x93\x02I"D/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}:cancel:\x01*\xda\x41\x1aproject_id, region, job_id\x12\xb8\x01\n\tDeleteJob\x12/.google.cloud.dataproc.v1beta2.DeleteJobRequest\x1a\x16.google.protobuf.Empty"b\x82\xd3\xe4\x93\x02?*=/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}\xda\x41\x1aproject_id, region, job_id\x1aK\xca\x41\x17\x64\x61taproc.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformBw\n!com.google.cloud.dataproc.v1beta2B\tJobsProtoP\x01ZEgoogle.golang.org/genproto/googleapis/cloud/dataproc/v1beta2;dataprocb\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, @@ -80,8 +84,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=427, - serialized_end=539, + serialized_start=485, + serialized_end=597, ) _sym_db.RegisterEnumDescriptor(_LOGGINGCONFIG_LEVEL) @@ -132,8 +136,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=3618, - serialized_end=3787, + serialized_start=3696, + serialized_end=3865, ) _sym_db.RegisterEnumDescriptor(_JOBSTATUS_STATE) @@ -158,8 +162,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=3789, - serialized_end=3861, + serialized_start=3867, + serialized_end=3939, ) _sym_db.RegisterEnumDescriptor(_JOBSTATUS_SUBSTATE) @@ -203,8 +207,8 @@ ], containing_type=None, serialized_options=None, - 
serialized_start=4059, - serialized_end=4194, + serialized_start=4162, + serialized_end=4297, ) _sym_db.RegisterEnumDescriptor(_YARNAPPLICATION_STATE) @@ -226,8 +230,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=5740, - serialized_end=5794, + serialized_start=5889, + serialized_end=5943, ) _sym_db.RegisterEnumDescriptor(_LISTJOBSREQUEST_JOBSTATEMATCHER) @@ -284,8 +288,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=319, - serialized_end=425, + serialized_start=377, + serialized_end=483, ) _LOGGINGCONFIG = _descriptor.Descriptor( @@ -322,8 +326,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=208, - serialized_end=539, + serialized_start=266, + serialized_end=597, ) @@ -379,8 +383,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=832, - serialized_end=881, + serialized_start=890, + serialized_end=939, ) _HADOOPJOB = _descriptor.Descriptor( @@ -551,8 +555,8 @@ fields=[], ) ], - serialized_start=542, - serialized_end=891, + serialized_start=600, + serialized_end=949, ) @@ -608,8 +612,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=832, - serialized_end=881, + serialized_start=890, + serialized_end=939, ) _SPARKJOB = _descriptor.Descriptor( @@ -780,8 +784,8 @@ fields=[], ) ], - serialized_start=894, - serialized_end=1241, + serialized_start=952, + serialized_end=1299, ) @@ -837,8 +841,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=832, - serialized_end=881, + serialized_start=890, + serialized_end=939, ) _PYSPARKJOB = _descriptor.Descriptor( @@ -863,7 +867,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1001,8 +1005,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1244, - serialized_end=1590, + serialized_start=1302, + serialized_end=1653, ) @@ -1028,7 +1032,7 
@@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ) ], @@ -1040,8 +1044,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1592, - serialized_end=1620, + serialized_start=1655, + serialized_end=1688, ) @@ -1097,8 +1101,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1939, - serialized_end=1993, + serialized_start=2007, + serialized_end=2061, ) _HIVEJOB_PROPERTIESENTRY = _descriptor.Descriptor( @@ -1153,8 +1157,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=832, - serialized_end=881, + serialized_start=890, + serialized_end=939, ) _HIVEJOB = _descriptor.Descriptor( @@ -1289,8 +1293,8 @@ fields=[], ) ], - serialized_start=1623, - serialized_end=2055, + serialized_start=1691, + serialized_end=2123, ) @@ -1346,8 +1350,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1939, - serialized_end=1993, + serialized_start=2007, + serialized_end=2061, ) _SPARKSQLJOB_PROPERTIESENTRY = _descriptor.Descriptor( @@ -1402,8 +1406,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=832, - serialized_end=881, + serialized_start=890, + serialized_end=939, ) _SPARKSQLJOB = _descriptor.Descriptor( @@ -1538,8 +1542,8 @@ fields=[], ) ], - serialized_start=2058, - serialized_end=2543, + serialized_start=2126, + serialized_end=2611, ) @@ -1595,8 +1599,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1939, - serialized_end=1993, + serialized_start=2007, + serialized_end=2061, ) _PIGJOB_PROPERTIESENTRY = _descriptor.Descriptor( @@ -1651,8 +1655,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=832, - serialized_end=881, + serialized_start=890, + serialized_end=939, ) _PIGJOB = _descriptor.Descriptor( @@ -1805,8 +1809,8 @@ fields=[], ) ], - serialized_start=2546, - serialized_end=3045, + serialized_start=2614, + serialized_end=3113, ) @@ 
-1862,8 +1866,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=832, - serialized_end=881, + serialized_start=890, + serialized_end=939, ) _SPARKRJOB = _descriptor.Descriptor( @@ -1888,7 +1892,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1990,8 +1994,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3048, - serialized_end=3338, + serialized_start=3116, + serialized_end=3411, ) @@ -2017,7 +2021,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2047,8 +2051,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3340, - serialized_end=3398, + serialized_start=3413, + serialized_end=3476, ) @@ -2140,8 +2144,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3401, - serialized_end=3861, + serialized_start=3479, + serialized_end=3939, ) @@ -2167,7 +2171,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2197,8 +2201,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3863, - serialized_end=3913, + serialized_start=3941, + serialized_end=3996, ) @@ -2224,7 +2228,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2242,7 +2246,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2260,7 +2264,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + 
serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2278,7 +2282,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -2290,8 +2294,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3916, - serialized_end=4194, + serialized_start=3999, + serialized_end=4297, ) @@ -2347,8 +2351,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5215, - serialized_end=5260, + serialized_start=5323, + serialized_end=5368, ) _JOB = _descriptor.Descriptor( @@ -2391,7 +2395,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2699,8 +2703,8 @@ fields=[], ) ], - serialized_start=4197, - serialized_end=5272, + serialized_start=4300, + serialized_end=5380, ) @@ -2738,8 +2742,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5274, - serialized_end=5320, + serialized_start=5382, + serialized_end=5428, ) @@ -2765,7 +2769,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2783,7 +2787,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2801,7 +2805,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2831,8 +2835,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5322, - serialized_end=5445, + serialized_start=5431, + serialized_end=5569, ) @@ -2858,7 +2862,7 @@ containing_type=None, is_extension=False, extension_scope=None, - 
serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2876,7 +2880,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2894,7 +2898,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -2906,8 +2910,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5447, - serialized_end=5514, + serialized_start=5571, + serialized_end=5653, ) @@ -2933,7 +2937,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2951,7 +2955,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -3053,8 +3057,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5517, - serialized_end=5794, + serialized_start=5656, + serialized_end=5943, ) @@ -3080,7 +3084,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -3098,7 +3102,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -3116,7 +3120,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -3134,7 +3138,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), 
_descriptor.FieldDescriptor( @@ -3152,7 +3156,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -3164,8 +3168,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5797, - serialized_end=5965, + serialized_start=5946, + serialized_end=6139, ) @@ -3221,8 +3225,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5967, - serialized_end=6060, + serialized_start=6141, + serialized_end=6234, ) @@ -3248,7 +3252,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -3266,7 +3270,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -3284,7 +3288,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -3296,8 +3300,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=6062, - serialized_end=6132, + serialized_start=6236, + serialized_end=6321, ) @@ -3323,7 +3327,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -3341,7 +3345,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -3359,7 +3363,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -3371,8 +3375,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=6134, - serialized_end=6204, + 
serialized_start=6323, + serialized_end=6408, ) _LOGGINGCONFIG_DRIVERLOGLEVELSENTRY.fields_by_name[ @@ -3655,17 +3659,15 @@ DESCRIPTOR=_SPARKJOB, __module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2", __doc__="""A Cloud Dataproc job for running `Apache - Spark `__ applications on YARN. + Spark `__ applications on YARN. The + specification of the main method to call to drive the job. Specify + either the jar file that contains the main class or the main class name. + To pass both a main jar and a main class in that jar, add the jar to + ``CommonJob.jar_file_uris``, and then specify the main class name in + ``main_class``. Attributes: - driver: - Required. The specification of the main method to call to - drive the job. Specify either the jar file that contains the - main class or the main class name. To pass both a main jar and - a main class in that jar, add the jar to - ``CommonJob.jar_file_uris``, and then specify the main class - name in ``main_class``. main_jar_file_uri: The HCFS URI of the jar file that contains the main class. main_class: @@ -4104,14 +4106,14 @@ Attributes: name: - Required. The application name. + Output only. The application name. state: - Required. The application state. + Output only. The application state. progress: - Required. The numerical progress of the application, from 1 to - 100. + Output only. The numerical progress of the application, from 1 + to 100. tracking_url: - Optional. The HTTP URL of the ApplicationMaster, + Optional. Output only. The HTTP URL of the ApplicationMaster, HistoryServer, or TimelineServer that provides application- specific information. The URL uses the internal hostname, and requires a proxy server for resolution and, possibly, access. 
@@ -4443,6 +4445,8 @@ _HADOOPJOB_PROPERTIESENTRY._options = None _SPARKJOB_PROPERTIESENTRY._options = None _PYSPARKJOB_PROPERTIESENTRY._options = None +_PYSPARKJOB.fields_by_name["main_python_file_uri"]._options = None +_QUERYLIST.fields_by_name["queries"]._options = None _HIVEJOB_SCRIPTVARIABLESENTRY._options = None _HIVEJOB_PROPERTIESENTRY._options = None _SPARKSQLJOB_SCRIPTVARIABLESENTRY._options = None @@ -4450,16 +4454,45 @@ _PIGJOB_SCRIPTVARIABLESENTRY._options = None _PIGJOB_PROPERTIESENTRY._options = None _SPARKRJOB_PROPERTIESENTRY._options = None +_SPARKRJOB.fields_by_name["main_r_file_uri"]._options = None +_JOBPLACEMENT.fields_by_name["cluster_name"]._options = None +_JOBREFERENCE.fields_by_name["project_id"]._options = None +_YARNAPPLICATION.fields_by_name["name"]._options = None +_YARNAPPLICATION.fields_by_name["state"]._options = None +_YARNAPPLICATION.fields_by_name["progress"]._options = None +_YARNAPPLICATION.fields_by_name["tracking_url"]._options = None _JOB_LABELSENTRY._options = None +_JOB.fields_by_name["placement"]._options = None +_SUBMITJOBREQUEST.fields_by_name["project_id"]._options = None +_SUBMITJOBREQUEST.fields_by_name["region"]._options = None +_SUBMITJOBREQUEST.fields_by_name["job"]._options = None +_GETJOBREQUEST.fields_by_name["project_id"]._options = None +_GETJOBREQUEST.fields_by_name["region"]._options = None +_GETJOBREQUEST.fields_by_name["job_id"]._options = None +_LISTJOBSREQUEST.fields_by_name["project_id"]._options = None +_LISTJOBSREQUEST.fields_by_name["region"]._options = None +_UPDATEJOBREQUEST.fields_by_name["project_id"]._options = None +_UPDATEJOBREQUEST.fields_by_name["region"]._options = None +_UPDATEJOBREQUEST.fields_by_name["job_id"]._options = None +_UPDATEJOBREQUEST.fields_by_name["job"]._options = None +_UPDATEJOBREQUEST.fields_by_name["update_mask"]._options = None +_CANCELJOBREQUEST.fields_by_name["project_id"]._options = None +_CANCELJOBREQUEST.fields_by_name["region"]._options = None 
+_CANCELJOBREQUEST.fields_by_name["job_id"]._options = None +_DELETEJOBREQUEST.fields_by_name["project_id"]._options = None +_DELETEJOBREQUEST.fields_by_name["region"]._options = None +_DELETEJOBREQUEST.fields_by_name["job_id"]._options = None _JOBCONTROLLER = _descriptor.ServiceDescriptor( name="JobController", full_name="google.cloud.dataproc.v1beta2.JobController", file=DESCRIPTOR, index=0, - serialized_options=None, - serialized_start=6207, - serialized_end=7242, + serialized_options=_b( + "\312A\027dataproc.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" + ), + serialized_start=6411, + serialized_end=7686, methods=[ _descriptor.MethodDescriptor( name="SubmitJob", @@ -4469,7 +4502,7 @@ input_type=_SUBMITJOBREQUEST, output_type=_JOB, serialized_options=_b( - '\202\323\344\223\002@";/v1beta2/projects/{project_id}/regions/{region}/jobs:submit:\001*' + '\202\323\344\223\002@";/v1beta2/projects/{project_id}/regions/{region}/jobs:submit:\001*\332A\027project_id, region, job' ), ), _descriptor.MethodDescriptor( @@ -4480,7 +4513,7 @@ input_type=_GETJOBREQUEST, output_type=_JOB, serialized_options=_b( - "\202\323\344\223\002?\022=/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}" + "\202\323\344\223\002?\022=/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}\332A\032project_id, region, job_id" ), ), _descriptor.MethodDescriptor( @@ -4491,7 +4524,7 @@ input_type=_LISTJOBSREQUEST, output_type=_LISTJOBSRESPONSE, serialized_options=_b( - "\202\323\344\223\0026\0224/v1beta2/projects/{project_id}/regions/{region}/jobs" + "\202\323\344\223\0026\0224/v1beta2/projects/{project_id}/regions/{region}/jobs\332A\022project_id, region\332A\032project_id, region, filter" ), ), _descriptor.MethodDescriptor( @@ -4513,7 +4546,7 @@ input_type=_CANCELJOBREQUEST, output_type=_JOB, serialized_options=_b( - '\202\323\344\223\002I"D/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}:cancel:\001*' + 
'\202\323\344\223\002I"D/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}:cancel:\001*\332A\032project_id, region, job_id' ), ), _descriptor.MethodDescriptor( @@ -4524,7 +4557,7 @@ input_type=_DELETEJOBREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, serialized_options=_b( - "\202\323\344\223\002?*=/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}" + "\202\323\344\223\002?*=/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}\332A\032project_id, region, job_id" ), ), ], diff --git a/dataproc/google/cloud/dataproc_v1beta2/proto/operations.proto b/dataproc/google/cloud/dataproc_v1beta2/proto/operations.proto index 8f9252a46716..74cbde3cac69 100644 --- a/dataproc/google/cloud/dataproc_v1beta2/proto/operations.proto +++ b/dataproc/google/cloud/dataproc_v1beta2/proto/operations.proto @@ -17,8 +17,8 @@ syntax = "proto3"; package google.cloud.dataproc.v1beta2; -import "google/api/annotations.proto"; import "google/protobuf/timestamp.proto"; +import "google/api/annotations.proto"; option go_package = "google.golang.org/genproto/googleapis/cloud/dataproc/v1beta2;dataproc"; option java_multiple_files = true; diff --git a/dataproc/google/cloud/dataproc_v1beta2/proto/operations_pb2.py b/dataproc/google/cloud/dataproc_v1beta2/proto/operations_pb2.py index 6c260f017d19..8a1d63b987b2 100644 --- a/dataproc/google/cloud/dataproc_v1beta2/proto/operations_pb2.py +++ b/dataproc/google/cloud/dataproc_v1beta2/proto/operations_pb2.py @@ -15,8 +15,8 @@ _sym_db = _symbol_database.Default() -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 DESCRIPTOR = _descriptor.FileDescriptor( @@ -27,11 +27,11 @@ "\n!com.google.cloud.dataproc.v1beta2B\017OperationsProtoP\001ZEgoogle.golang.org/genproto/googleapis/cloud/dataproc/v1beta2;dataproc" ), serialized_pb=_b( - 
'\n4google/cloud/dataproc_v1beta2/proto/operations.proto\x12\x1dgoogle.cloud.dataproc.v1beta2\x1a\x1cgoogle/api/annotations.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xfa\x01\n\x16\x43lusterOperationStatus\x12J\n\x05state\x18\x01 \x01(\x0e\x32;.google.cloud.dataproc.v1beta2.ClusterOperationStatus.State\x12\x13\n\x0binner_state\x18\x02 \x01(\t\x12\x0f\n\x07\x64\x65tails\x18\x03 \x01(\t\x12\x34\n\x10state_start_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"8\n\x05State\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0b\n\x07PENDING\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\x12\x08\n\x04\x44ONE\x10\x03"\x9f\x03\n\x18\x43lusterOperationMetadata\x12\x14\n\x0c\x63luster_name\x18\x07 \x01(\t\x12\x14\n\x0c\x63luster_uuid\x18\x08 \x01(\t\x12\x45\n\x06status\x18\t \x01(\x0b\x32\x35.google.cloud.dataproc.v1beta2.ClusterOperationStatus\x12M\n\x0estatus_history\x18\n \x03(\x0b\x32\x35.google.cloud.dataproc.v1beta2.ClusterOperationStatus\x12\x16\n\x0eoperation_type\x18\x0b \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x0c \x01(\t\x12S\n\x06labels\x18\r \x03(\x0b\x32\x43.google.cloud.dataproc.v1beta2.ClusterOperationMetadata.LabelsEntry\x12\x10\n\x08warnings\x18\x0e \x03(\t\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42}\n!com.google.cloud.dataproc.v1beta2B\x0fOperationsProtoP\x01ZEgoogle.golang.org/genproto/googleapis/cloud/dataproc/v1beta2;dataprocb\x06proto3' + '\n4google/cloud/dataproc_v1beta2/proto/operations.proto\x12\x1dgoogle.cloud.dataproc.v1beta2\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/api/annotations.proto"\xfa\x01\n\x16\x43lusterOperationStatus\x12J\n\x05state\x18\x01 \x01(\x0e\x32;.google.cloud.dataproc.v1beta2.ClusterOperationStatus.State\x12\x13\n\x0binner_state\x18\x02 \x01(\t\x12\x0f\n\x07\x64\x65tails\x18\x03 \x01(\t\x12\x34\n\x10state_start_time\x18\x04 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp"8\n\x05State\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0b\n\x07PENDING\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\x12\x08\n\x04\x44ONE\x10\x03"\x9f\x03\n\x18\x43lusterOperationMetadata\x12\x14\n\x0c\x63luster_name\x18\x07 \x01(\t\x12\x14\n\x0c\x63luster_uuid\x18\x08 \x01(\t\x12\x45\n\x06status\x18\t \x01(\x0b\x32\x35.google.cloud.dataproc.v1beta2.ClusterOperationStatus\x12M\n\x0estatus_history\x18\n \x03(\x0b\x32\x35.google.cloud.dataproc.v1beta2.ClusterOperationStatus\x12\x16\n\x0eoperation_type\x18\x0b \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x0c \x01(\t\x12S\n\x06labels\x18\r \x03(\x0b\x32\x43.google.cloud.dataproc.v1beta2.ClusterOperationMetadata.LabelsEntry\x12\x10\n\x08warnings\x18\x0e \x03(\t\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42}\n!com.google.cloud.dataproc.v1beta2B\x0fOperationsProtoP\x01ZEgoogle.golang.org/genproto/googleapis/cloud/dataproc/v1beta2;dataprocb\x06proto3' ), dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, + google_dot_api_dot_annotations__pb2.DESCRIPTOR, ], ) diff --git a/dataproc/google/cloud/dataproc_v1beta2/proto/workflow_templates.proto b/dataproc/google/cloud/dataproc_v1beta2/proto/workflow_templates.proto index edaf357cc6c5..6fd30974176c 100644 --- a/dataproc/google/cloud/dataproc_v1beta2/proto/workflow_templates.proto +++ b/dataproc/google/cloud/dataproc_v1beta2/proto/workflow_templates.proto @@ -18,6 +18,9 @@ syntax = "proto3"; package google.cloud.dataproc.v1beta2; import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; import "google/cloud/dataproc/v1beta2/clusters.proto"; import "google/cloud/dataproc/v1beta2/jobs.proto"; import "google/longrunning/operations.proto"; @@ -32,6 +35,9 @@ option java_package = "com.google.cloud.dataproc.v1beta2"; // The API interface for 
managing Workflow Templates in the // Cloud Dataproc API. service WorkflowTemplateService { + option (google.api.default_host) = "dataproc.googleapis.com"; + option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; + // Creates new workflow template. rpc CreateWorkflowTemplate(CreateWorkflowTemplateRequest) returns (WorkflowTemplate) { option (google.api.http) = { @@ -42,6 +48,7 @@ service WorkflowTemplateService { body: "template" } }; + option (google.api.method_signature) = "parent, template"; } // Retrieves the latest workflow template. @@ -55,6 +62,7 @@ service WorkflowTemplateService { get: "/v1beta2/{name=projects/*/locations/*/workflowTemplates/*}" } }; + option (google.api.method_signature) = "name"; } // Instantiates a template and begins execution. @@ -70,7 +78,9 @@ service WorkflowTemplateService { // clusters to be deleted. // // The [Operation.metadata][google.longrunning.Operation.metadata] will be - // [WorkflowMetadata][google.cloud.dataproc.v1beta2.WorkflowMetadata]. + // [WorkflowMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#workflowmetadata). + // Also see [Using + // WorkflowMetadata](/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata). // // On successful completion, // [Operation.response][google.longrunning.Operation.response] will be @@ -84,6 +94,12 @@ service WorkflowTemplateService { body: "*" } }; + option (google.api.method_signature) = "name"; + option (google.api.method_signature) = "name, parameters"; + option (google.longrunning.operation_info) = { + response_type: "google.protobuf.Empty" + metadata_type: "WorkflowMetadata" + }; } // Instantiates a template and begins execution. @@ -103,7 +119,9 @@ service WorkflowTemplateService { // clusters to be deleted. // // The [Operation.metadata][google.longrunning.Operation.metadata] will be - // [WorkflowMetadata][google.cloud.dataproc.v1beta2.WorkflowMetadata]. 
+ // [WorkflowMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#workflowmetadata). + // Also see [Using + // WorkflowMetadata](/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata). // // On successful completion, // [Operation.response][google.longrunning.Operation.response] will be @@ -117,6 +135,11 @@ service WorkflowTemplateService { body: "template" } }; + option (google.api.method_signature) = "parent, template"; + option (google.longrunning.operation_info) = { + response_type: "google.protobuf.Empty" + metadata_type: "WorkflowMetadata" + }; } // Updates (replaces) workflow template. The updated template @@ -130,6 +153,7 @@ service WorkflowTemplateService { body: "template" } }; + option (google.api.method_signature) = "template"; } // Lists workflows that match the specified filter in the request. @@ -140,6 +164,7 @@ service WorkflowTemplateService { get: "/v1beta2/{parent=projects/*/locations/*}/workflowTemplates" } }; + option (google.api.method_signature) = "parent"; } // Deletes a workflow template. It does not cancel in-progress workflows. @@ -150,11 +175,19 @@ service WorkflowTemplateService { delete: "/v1beta2/{name=projects/*/locations/*/workflowTemplates/*}" } }; + option (google.api.method_signature) = "name"; } } // A Cloud Dataproc workflow template resource. message WorkflowTemplate { + option (google.api.resource) = { + type: "dataproc.googleapis.com/WorkflowTemplate" + pattern: "projects/{project}/regions/{region}/workflowTemplates/{workflow_template}" + pattern: "projects/{project}/locations/{location}/workflowTemplates/{workflow_template}" + history: ORIGINALLY_SINGLE_PATTERN + }; + // Required. The template id. // // The id must contain only letters (a-z, A-Z), numbers (0-9), @@ -162,12 +195,19 @@ message WorkflowTemplate { // or hyphen. Must consist of between 3 and 50 characters. // // . - string id = 2; + string id = 2 [(google.api.field_behavior) = REQUIRED]; - // Output only. 
The "resource name" of the template, as described - // in https://cloud.google.com/apis/design/resource_names of the form - // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` - string name = 1; + // Output only. The resource name of the workflow template, as described + // in https://cloud.google.com/apis/design/resource_names. + // + // * For `projects.regions.workflowTemplates`, the resource name of the + // template has the following format: + // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` + // + // * For `projects.locations.workflowTemplates`, the resource name of the + // template has the following format: + // `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}` + string name = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; // Optional. Used to perform a consistent read-modify-write. // @@ -178,13 +218,13 @@ message WorkflowTemplate { // the current template with the `version` field filled in with the // current server version. The user updates other fields in the template, // then returns it as part of the `UpdateWorkflowTemplate` request. - int32 version = 3; + int32 version = 3 [(google.api.field_behavior) = OPTIONAL]; // Output only. The time template was created. - google.protobuf.Timestamp create_time = 4; + google.protobuf.Timestamp create_time = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The time template was last updated. - google.protobuf.Timestamp update_time = 5; + google.protobuf.Timestamp update_time = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; // Optional. The labels to associate with this template. These labels // will be propagated to all jobs and clusters created by the workflow @@ -198,7 +238,7 @@ message WorkflowTemplate { // [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt). // // No more than 32 labels can be associated with a template. - map labels = 6; + map labels = 6 [(google.api.field_behavior) = OPTIONAL]; // Required. 
WorkflowTemplate scheduling information. WorkflowTemplatePlacement placement = 7; @@ -209,7 +249,7 @@ message WorkflowTemplate { // Optional. Template parameters whose values are substituted into the // template. Values for parameters must be provided when the template is // instantiated. - repeated TemplateParameter parameters = 9; + repeated TemplateParameter parameters = 9 [(google.api.field_behavior) = OPTIONAL]; } // Specifies workflow execution target. @@ -336,7 +376,7 @@ message OrderedJob { // - Main class (in HadoopJob and SparkJob) // - Zone (in ClusterSelector) message TemplateParameter { - // Required. Parameter name. + // Required. Parameter name. // The parameter name is used as the key, and paired with the // parameter value, which are passed to the template when the template // is instantiated. @@ -442,57 +482,66 @@ message WorkflowMetadata { DONE = 3; } - // Output only. The "resource name" of the template. - string template = 1; + // Output only. The resource name of the workflow template as described + // in https://cloud.google.com/apis/design/resource_names. + // + // * For `projects.regions.workflowTemplates`, the resource name of the + // template has the following format: + // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` + // + // * For `projects.locations.workflowTemplates`, the resource name of the + // template has the following format: + // `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}` + string template = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The version of template at the time of // workflow instantiation. - int32 version = 2; + int32 version = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The create cluster operation metadata. - ClusterOperation create_cluster = 3; + ClusterOperation create_cluster = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The workflow graph. 
- WorkflowGraph graph = 4; + WorkflowGraph graph = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The delete cluster operation metadata. - ClusterOperation delete_cluster = 5; + ClusterOperation delete_cluster = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The workflow state. - State state = 6; + State state = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The name of the target cluster. - string cluster_name = 7; + string cluster_name = 7 [(google.api.field_behavior) = OUTPUT_ONLY]; // Map from parameter names to values that were used for those parameters. map parameters = 8; // Output only. Workflow start time. - google.protobuf.Timestamp start_time = 9; + google.protobuf.Timestamp start_time = 9 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Workflow end time. - google.protobuf.Timestamp end_time = 10; + google.protobuf.Timestamp end_time = 10 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The UUID of target cluster. - string cluster_uuid = 11; + string cluster_uuid = 11 [(google.api.field_behavior) = OUTPUT_ONLY]; } // The cluster operation triggered by a workflow. message ClusterOperation { // Output only. The id of the cluster operation. - string operation_id = 1; + string operation_id = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Error, if operation failed. - string error = 2; + string error = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Indicates the operation is done. - bool done = 3; + bool done = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; } // The workflow graph. message WorkflowGraph { // Output only. The workflow nodes. - repeated WorkflowNode nodes = 1; + repeated WorkflowNode nodes = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; } // The workflow node. @@ -520,41 +569,65 @@ message WorkflowNode { } // Output only. The name of the node. 
- string step_id = 1; + string step_id = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Node's prerequisite nodes. - repeated string prerequisite_step_ids = 2; + repeated string prerequisite_step_ids = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The job id; populated after the node enters RUNNING state. - string job_id = 3; + string job_id = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The node state. - NodeState state = 5; + NodeState state = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The error detail. - string error = 6; + string error = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; } // A request to create a workflow template. message CreateWorkflowTemplateRequest { - // Required. The "resource name" of the region, as described - // in https://cloud.google.com/apis/design/resource_names of the form - // `projects/{project_id}/regions/{region}` - string parent = 1; + // Required. The resource name of the region or location, as described + // in https://cloud.google.com/apis/design/resource_names. + // + // * For `projects.regions.workflowTemplates,create`, the resource name of the + // region has the following format: + // `projects/{project_id}/regions/{region}` + // + // * For `projects.locations.workflowTemplates.create`, the resource name of + // the location has the following format: + // `projects/{project_id}/locations/{location}` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "dataproc.googleapis.com/WorkflowTemplate" + } + ]; // Required. The Dataproc workflow template to create. - WorkflowTemplate template = 2; + WorkflowTemplate template = 2 [(google.api.field_behavior) = REQUIRED]; } // A request to fetch a workflow template. message GetWorkflowTemplateRequest { - // Required. 
The "resource name" of the workflow template, as described - // in https://cloud.google.com/apis/design/resource_names of the form - // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` - string name = 1; + // Required. The resource name of the workflow template, as described + // in https://cloud.google.com/apis/design/resource_names. + // + // * For `projects.regions.workflowTemplates.get`, the resource name of the + // template has the following format: + // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` + // + // * For `projects.locations.workflowTemplates.get`, the resource name of the + // template has the following format: + // `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "dataproc.googleapis.com/WorkflowTemplate" + } + ]; // Optional. The version of workflow template to retrieve. Only previously - // instatiated versions can be retrieved. + // instantiated versions can be retrieved. // // If unspecified, retrieves the current version. int32 version = 2; @@ -562,10 +635,22 @@ message GetWorkflowTemplateRequest { // A request to instantiate a workflow template. message InstantiateWorkflowTemplateRequest { - // Required. The "resource name" of the workflow template, as described - // in https://cloud.google.com/apis/design/resource_names of the form - // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` - string name = 1; + // Required. The resource name of the workflow template, as described + // in https://cloud.google.com/apis/design/resource_names. 
+ // + // * For `projects.regions.workflowTemplates.instantiate`, the resource name + // of the template has the following format: + // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` + // + // * For `projects.locations.workflowTemplates.instantiate`, the resource name + // of the template has the following format: + // `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "dataproc.googleapis.com/WorkflowTemplate" + } + ]; // Optional. The version of workflow template to instantiate. If specified, // the workflow will be instantiated only if the current version of @@ -596,13 +681,25 @@ message InstantiateWorkflowTemplateRequest { // A request to instantiate an inline workflow template. message InstantiateInlineWorkflowTemplateRequest { - // Required. The "resource name" of the workflow template region, as described - // in https://cloud.google.com/apis/design/resource_names of the form - // `projects/{project_id}/regions/{region}` - string parent = 1; + // Required. The resource name of the region or location, as described + // in https://cloud.google.com/apis/design/resource_names. + // + // * For `projects.regions.workflowTemplates,instantiateinline`, the resource + // name of the region has the following format: + // `projects/{project_id}/regions/{region}` + // + // * For `projects.locations.workflowTemplates.instantiateinline`, the + // resource name of the location has the following format: + // `projects/{project_id}/locations/{location}` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "dataproc.googleapis.com/WorkflowTemplate" + } + ]; // Required. The workflow template to instantiate. - WorkflowTemplate template = 2; + WorkflowTemplate template = 2 [(google.api.field_behavior) = REQUIRED]; // Deprecated. 
Please use `request_id` field instead. string instance_id = 3; @@ -624,15 +721,27 @@ message UpdateWorkflowTemplateRequest { // Required. The updated workflow template. // // The `template.version` field must match the current version. - WorkflowTemplate template = 1; + WorkflowTemplate template = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "dataproc.googleapis.com/WorkflowTemplate" + } + ]; } // A request to list workflow templates in a project. message ListWorkflowTemplatesRequest { - // Required. The "resource name" of the region, as described - // in https://cloud.google.com/apis/design/resource_names of the form - // `projects/{project_id}/regions/{region}` - string parent = 1; + // Required. The resource name of the region or location, as described + // in https://cloud.google.com/apis/design/resource_names. + // + // * For `projects.regions.workflowTemplates,list`, the resource + // name of the region has the following format: + // `projects/{project_id}/regions/{region}` + // + // * For `projects.locations.workflowTemplates.list`, the + // resource name of the location has the following format: + // `projects/{project_id}/locations/{location}` + string parent = 1 [(google.api.field_behavior) = REQUIRED]; // Optional. The maximum number of results to return in each response. int32 page_size = 2; @@ -645,22 +754,34 @@ message ListWorkflowTemplatesRequest { // A response to a request to list workflow templates in a project. message ListWorkflowTemplatesResponse { // Output only. WorkflowTemplates list. - repeated WorkflowTemplate templates = 1; + repeated WorkflowTemplate templates = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. This token is included in the response if there are more // results to fetch. To fetch additional results, provide this value as the // page_token in a subsequent ListWorkflowTemplatesRequest. 
- string next_page_token = 2; + string next_page_token = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; } // A request to delete a workflow template. // // Currently started workflows will remain running. message DeleteWorkflowTemplateRequest { - // Required. The "resource name" of the workflow template, as described - // in https://cloud.google.com/apis/design/resource_names of the form - // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` - string name = 1; + // Required. The resource name of the workflow template, as described + // in https://cloud.google.com/apis/design/resource_names. + // + // * For `projects.regions.workflowTemplates.delete`, the resource name + // of the template has the following format: + // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` + // + // * For `projects.locations.workflowTemplates.instantiate`, the resource name + // of the template has the following format: + // `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "dataproc.googleapis.com/WorkflowTemplate" + } + ]; // Optional. The version of workflow template to delete. 
If specified, // will only delete the template if the current server version matches diff --git a/dataproc/google/cloud/dataproc_v1beta2/proto/workflow_templates_pb2.py b/dataproc/google/cloud/dataproc_v1beta2/proto/workflow_templates_pb2.py index 850d445dff25..2bed13fb2178 100644 --- a/dataproc/google/cloud/dataproc_v1beta2/proto/workflow_templates_pb2.py +++ b/dataproc/google/cloud/dataproc_v1beta2/proto/workflow_templates_pb2.py @@ -16,6 +16,9 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.cloud.dataproc_v1beta2.proto import ( clusters_pb2 as google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_clusters__pb2, ) @@ -37,10 +40,13 @@ "\n!com.google.cloud.dataproc.v1beta2B\026WorkflowTemplatesProtoP\001ZEgoogle.golang.org/genproto/googleapis/cloud/dataproc/v1beta2;dataproc" ), serialized_pb=_b( - '\n\n\nhadoop_job\x18\x02 \x01(\x0b\x32(.google.cloud.dataproc.v1beta2.HadoopJobH\x00\x12<\n\tspark_job\x18\x03 \x01(\x0b\x32\'.google.cloud.dataproc.v1beta2.SparkJobH\x00\x12@\n\x0bpyspark_job\x18\x04 \x01(\x0b\x32).google.cloud.dataproc.v1beta2.PySparkJobH\x00\x12:\n\x08hive_job\x18\x05 \x01(\x0b\x32&.google.cloud.dataproc.v1beta2.HiveJobH\x00\x12\x38\n\x07pig_job\x18\x06 \x01(\x0b\x32%.google.cloud.dataproc.v1beta2.PigJobH\x00\x12\x43\n\rspark_sql_job\x18\x07 \x01(\x0b\x32*.google.cloud.dataproc.v1beta2.SparkSqlJobH\x00\x12\x45\n\x06labels\x18\x08 \x03(\x0b\x32\x35.google.cloud.dataproc.v1beta2.OrderedJob.LabelsEntry\x12@\n\nscheduling\x18\t \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.JobScheduling\x12\x1d\n\x15prerequisite_step_ids\x18\n \x03(\t\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01\x42\n\n\x08job_type"\x8e\x01\n\x11TemplateParameter\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06\x66ields\x18\x02 \x03(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12\x46\n\nvalidation\x18\x04 \x01(\x0b\x32\x32.google.cloud.dataproc.v1beta2.ParameterValidation"\xab\x01\n\x13ParameterValidation\x12?\n\x05regex\x18\x01 \x01(\x0b\x32..google.cloud.dataproc.v1beta2.RegexValidationH\x00\x12@\n\x06values\x18\x02 \x01(\x0b\x32..google.cloud.dataproc.v1beta2.ValueValidationH\x00\x42\x11\n\x0fvalidation_type""\n\x0fRegexValidation\x12\x0f\n\x07regexes\x18\x01 \x03(\t"!\n\x0fValueValidation\x12\x0e\n\x06values\x18\x01 \x03(\t"\x96\x05\n\x10WorkflowMetadata\x12\x10\n\x08template\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\x05\x12G\n\x0e\x63reate_cluster\x18\x03 \x01(\x0b\x32/.google.cloud.dataproc.v1beta2.ClusterOperation\x12;\n\x05graph\x18\x04 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.WorkflowGraph\x12G\n\x0e\x64\x65lete_cluster\x18\x05 \x01(\x0b\x32/.google.cloud.dataproc.v1beta2.ClusterOperation\x12\x44\n\x05state\x18\x06 \x01(\x0e\x32\x35.google.cloud.dataproc.v1beta2.WorkflowMetadata.State\x12\x14\n\x0c\x63luster_name\x18\x07 \x01(\t\x12S\n\nparameters\x18\x08 \x03(\x0b\x32?.google.cloud.dataproc.v1beta2.WorkflowMetadata.ParametersEntry\x12.\n\nstart_time\x18\t \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x14\n\x0c\x63luster_uuid\x18\x0b \x01(\t\x1a\x31\n\x0fParametersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"8\n\x05State\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0b\n\x07PENDING\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\x12\x08\n\x04\x44ONE\x10\x03"E\n\x10\x43lusterOperation\x12\x14\n\x0coperation_id\x18\x01 \x01(\t\x12\r\n\x05\x65rror\x18\x02 \x01(\t\x12\x0c\n\x04\x64one\x18\x03 \x01(\x08"K\n\rWorkflowGraph\x12:\n\x05nodes\x18\x01 
\x03(\x0b\x32+.google.cloud.dataproc.v1beta2.WorkflowNode"\x90\x02\n\x0cWorkflowNode\x12\x0f\n\x07step_id\x18\x01 \x01(\t\x12\x1d\n\x15prerequisite_step_ids\x18\x02 \x03(\t\x12\x0e\n\x06job_id\x18\x03 \x01(\t\x12\x44\n\x05state\x18\x05 \x01(\x0e\x32\x35.google.cloud.dataproc.v1beta2.WorkflowNode.NodeState\x12\r\n\x05\x65rror\x18\x06 \x01(\t"k\n\tNodeState\x12\x1b\n\x17NODE_STATUS_UNSPECIFIED\x10\x00\x12\x0b\n\x07\x42LOCKED\x10\x01\x12\x0c\n\x08RUNNABLE\x10\x02\x12\x0b\n\x07RUNNING\x10\x03\x12\r\n\tCOMPLETED\x10\x04\x12\n\n\x06\x46\x41ILED\x10\x05"r\n\x1d\x43reateWorkflowTemplateRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x41\n\x08template\x18\x02 \x01(\x0b\x32/.google.cloud.dataproc.v1beta2.WorkflowTemplate";\n\x1aGetWorkflowTemplateRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\x05"\x8a\x02\n"InstantiateWorkflowTemplateRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\x05\x12\x17\n\x0binstance_id\x18\x03 \x01(\tB\x02\x18\x01\x12\x12\n\nrequest_id\x18\x05 \x01(\t\x12\x65\n\nparameters\x18\x04 \x03(\x0b\x32Q.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest.ParametersEntry\x1a\x31\n\x0fParametersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xa6\x01\n(InstantiateInlineWorkflowTemplateRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x41\n\x08template\x18\x02 \x01(\x0b\x32/.google.cloud.dataproc.v1beta2.WorkflowTemplate\x12\x13\n\x0binstance_id\x18\x03 \x01(\t\x12\x12\n\nrequest_id\x18\x04 \x01(\t"b\n\x1dUpdateWorkflowTemplateRequest\x12\x41\n\x08template\x18\x01 \x01(\x0b\x32/.google.cloud.dataproc.v1beta2.WorkflowTemplate"U\n\x1cListWorkflowTemplatesRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"|\n\x1dListWorkflowTemplatesResponse\x12\x42\n\ttemplates\x18\x01 \x03(\x0b\x32/.google.cloud.dataproc.v1beta2.WorkflowTemplate\x12\x17\n\x0fnext_page_token\x18\x02 
\x01(\t">\n\x1d\x44\x65leteWorkflowTemplateRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\x05\x32\xdf\x0f\n\x17WorkflowTemplateService\x12\x9d\x02\n\x16\x43reateWorkflowTemplate\x12<.google.cloud.dataproc.v1beta2.CreateWorkflowTemplateRequest\x1a/.google.cloud.dataproc.v1beta2.WorkflowTemplate"\x93\x01\x82\xd3\xe4\x93\x02\x8c\x01"8/v1beta2/{parent=projects/*/regions/*}/workflowTemplates:\x08templateZF":/v1beta2/{parent=projects/*/locations/*}/workflowTemplates:\x08template\x12\x81\x02\n\x13GetWorkflowTemplate\x12\x39.google.cloud.dataproc.v1beta2.GetWorkflowTemplateRequest\x1a/.google.cloud.dataproc.v1beta2.WorkflowTemplate"~\x82\xd3\xe4\x93\x02x\x12\x38/v1beta2/{name=projects/*/regions/*/workflowTemplates/*}Z<\x12:/v1beta2/{name=projects/*/locations/*/workflowTemplates/*}\x12\x9f\x02\n\x1bInstantiateWorkflowTemplate\x12\x41.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest\x1a\x1d.google.longrunning.Operation"\x9d\x01\x82\xd3\xe4\x93\x02\x96\x01"D/v1beta2/{name=projects/*/regions/*/workflowTemplates/*}:instantiate:\x01*ZK"F/v1beta2/{name=projects/*/locations/*/workflowTemplates/*}:instantiate:\x01*\x12\xc5\x02\n!InstantiateInlineWorkflowTemplate\x12G.google.cloud.dataproc.v1beta2.InstantiateInlineWorkflowTemplateRequest\x1a\x1d.google.longrunning.Operation"\xb7\x01\x82\xd3\xe4\x93\x02\xb0\x01"L/v1beta2/{parent=projects/*/locations/*}/workflowTemplates:instantiateInline:\x08templateZV"J/v1beta2/{parent=projects/*/regions/*}/workflowTemplates:instantiateInline:\x08template\x12\xaf\x02\n\x16UpdateWorkflowTemplate\x12<.google.cloud.dataproc.v1beta2.UpdateWorkflowTemplateRequest\x1a/.google.cloud.dataproc.v1beta2.WorkflowTemplate"\xa5\x01\x82\xd3\xe4\x93\x02\x9e\x01\x1a\x41/v1beta2/{template.name=projects/*/regions/*/workflowTemplates/*}:\x08templateZO\x1a\x43/v1beta2/{template.name=projects/*/locations/*/workflowTemplates/*}:\x08template\x12\x92\x02\n\x15ListWorkflowTemplates\x12;.google.cloud.dataproc.v1beta2.ListWorkflowTe
mplatesRequest\x1a<.google.cloud.dataproc.v1beta2.ListWorkflowTemplatesResponse"~\x82\xd3\xe4\x93\x02x\x12\x38/v1beta2/{parent=projects/*/regions/*}/workflowTemplatesZ<\x12:/v1beta2/{parent=projects/*/locations/*}/workflowTemplates\x12\xee\x01\n\x16\x44\x65leteWorkflowTemplate\x12<.google.cloud.dataproc.v1beta2.DeleteWorkflowTemplateRequest\x1a\x16.google.protobuf.Empty"~\x82\xd3\xe4\x93\x02x*8/v1beta2/{name=projects/*/regions/*/workflowTemplates/*}Z<*:/v1beta2/{name=projects/*/locations/*/workflowTemplates/*}B\x84\x01\n!com.google.cloud.dataproc.v1beta2B\x16WorkflowTemplatesProtoP\x01ZEgoogle.golang.org/genproto/googleapis/cloud/dataproc/v1beta2;dataprocb\x06proto3' + '\n\n\nhadoop_job\x18\x02 \x01(\x0b\x32(.google.cloud.dataproc.v1beta2.HadoopJobH\x00\x12<\n\tspark_job\x18\x03 \x01(\x0b\x32\'.google.cloud.dataproc.v1beta2.SparkJobH\x00\x12@\n\x0bpyspark_job\x18\x04 \x01(\x0b\x32).google.cloud.dataproc.v1beta2.PySparkJobH\x00\x12:\n\x08hive_job\x18\x05 \x01(\x0b\x32&.google.cloud.dataproc.v1beta2.HiveJobH\x00\x12\x38\n\x07pig_job\x18\x06 \x01(\x0b\x32%.google.cloud.dataproc.v1beta2.PigJobH\x00\x12\x43\n\rspark_sql_job\x18\x07 \x01(\x0b\x32*.google.cloud.dataproc.v1beta2.SparkSqlJobH\x00\x12\x45\n\x06labels\x18\x08 \x03(\x0b\x32\x35.google.cloud.dataproc.v1beta2.OrderedJob.LabelsEntry\x12@\n\nscheduling\x18\t \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.JobScheduling\x12\x1d\n\x15prerequisite_step_ids\x18\n \x03(\t\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\n\n\x08job_type"\x8e\x01\n\x11TemplateParameter\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06\x66ields\x18\x02 \x03(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12\x46\n\nvalidation\x18\x04 \x01(\x0b\x32\x32.google.cloud.dataproc.v1beta2.ParameterValidation"\xab\x01\n\x13ParameterValidation\x12?\n\x05regex\x18\x01 \x01(\x0b\x32..google.cloud.dataproc.v1beta2.RegexValidationH\x00\x12@\n\x06values\x18\x02 
\x01(\x0b\x32..google.cloud.dataproc.v1beta2.ValueValidationH\x00\x42\x11\n\x0fvalidation_type""\n\x0fRegexValidation\x12\x0f\n\x07regexes\x18\x01 \x03(\t"!\n\x0fValueValidation\x12\x0e\n\x06values\x18\x01 \x03(\t"\xc8\x05\n\x10WorkflowMetadata\x12\x15\n\x08template\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12\x14\n\x07version\x18\x02 \x01(\x05\x42\x03\xe0\x41\x03\x12L\n\x0e\x63reate_cluster\x18\x03 \x01(\x0b\x32/.google.cloud.dataproc.v1beta2.ClusterOperationB\x03\xe0\x41\x03\x12@\n\x05graph\x18\x04 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.WorkflowGraphB\x03\xe0\x41\x03\x12L\n\x0e\x64\x65lete_cluster\x18\x05 \x01(\x0b\x32/.google.cloud.dataproc.v1beta2.ClusterOperationB\x03\xe0\x41\x03\x12I\n\x05state\x18\x06 \x01(\x0e\x32\x35.google.cloud.dataproc.v1beta2.WorkflowMetadata.StateB\x03\xe0\x41\x03\x12\x19\n\x0c\x63luster_name\x18\x07 \x01(\tB\x03\xe0\x41\x03\x12S\n\nparameters\x18\x08 \x03(\x0b\x32?.google.cloud.dataproc.v1beta2.WorkflowMetadata.ParametersEntry\x12\x33\n\nstart_time\x18\t \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x31\n\x08\x65nd_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x19\n\x0c\x63luster_uuid\x18\x0b \x01(\tB\x03\xe0\x41\x03\x1a\x31\n\x0fParametersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"8\n\x05State\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0b\n\x07PENDING\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\x12\x08\n\x04\x44ONE\x10\x03"T\n\x10\x43lusterOperation\x12\x19\n\x0coperation_id\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12\x12\n\x05\x65rror\x18\x02 \x01(\tB\x03\xe0\x41\x03\x12\x11\n\x04\x64one\x18\x03 \x01(\x08\x42\x03\xe0\x41\x03"P\n\rWorkflowGraph\x12?\n\x05nodes\x18\x01 \x03(\x0b\x32+.google.cloud.dataproc.v1beta2.WorkflowNodeB\x03\xe0\x41\x03"\xa9\x02\n\x0cWorkflowNode\x12\x14\n\x07step_id\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12"\n\x15prerequisite_step_ids\x18\x02 \x03(\tB\x03\xe0\x41\x03\x12\x13\n\x06job_id\x18\x03 \x01(\tB\x03\xe0\x41\x03\x12I\n\x05state\x18\x05 
\x01(\x0e\x32\x35.google.cloud.dataproc.v1beta2.WorkflowNode.NodeStateB\x03\xe0\x41\x03\x12\x12\n\x05\x65rror\x18\x06 \x01(\tB\x03\xe0\x41\x03"k\n\tNodeState\x12\x1b\n\x17NODE_STATUS_UNSPECIFIED\x10\x00\x12\x0b\n\x07\x42LOCKED\x10\x01\x12\x0c\n\x08RUNNABLE\x10\x02\x12\x0b\n\x07RUNNING\x10\x03\x12\r\n\tCOMPLETED\x10\x04\x12\n\n\x06\x46\x41ILED\x10\x05"\xa9\x01\n\x1d\x43reateWorkflowTemplateRequest\x12@\n\x06parent\x18\x01 \x01(\tB0\xe0\x41\x02\xfa\x41*\x12(dataproc.googleapis.com/WorkflowTemplate\x12\x46\n\x08template\x18\x02 \x01(\x0b\x32/.google.cloud.dataproc.v1beta2.WorkflowTemplateB\x03\xe0\x41\x02"m\n\x1aGetWorkflowTemplateRequest\x12>\n\x04name\x18\x01 \x01(\tB0\xe0\x41\x02\xfa\x41*\n(dataproc.googleapis.com/WorkflowTemplate\x12\x0f\n\x07version\x18\x02 \x01(\x05"\xbc\x02\n"InstantiateWorkflowTemplateRequest\x12>\n\x04name\x18\x01 \x01(\tB0\xe0\x41\x02\xfa\x41*\n(dataproc.googleapis.com/WorkflowTemplate\x12\x0f\n\x07version\x18\x02 \x01(\x05\x12\x17\n\x0binstance_id\x18\x03 \x01(\tB\x02\x18\x01\x12\x12\n\nrequest_id\x18\x05 \x01(\t\x12\x65\n\nparameters\x18\x04 \x03(\x0b\x32Q.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest.ParametersEntry\x1a\x31\n\x0fParametersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xdd\x01\n(InstantiateInlineWorkflowTemplateRequest\x12@\n\x06parent\x18\x01 \x01(\tB0\xe0\x41\x02\xfa\x41*\x12(dataproc.googleapis.com/WorkflowTemplate\x12\x46\n\x08template\x18\x02 \x01(\x0b\x32/.google.cloud.dataproc.v1beta2.WorkflowTemplateB\x03\xe0\x41\x02\x12\x13\n\x0binstance_id\x18\x03 \x01(\t\x12\x12\n\nrequest_id\x18\x04 \x01(\t"\x94\x01\n\x1dUpdateWorkflowTemplateRequest\x12s\n\x08template\x18\x01 \x01(\x0b\x32/.google.cloud.dataproc.v1beta2.WorkflowTemplateB0\xe0\x41\x02\xfa\x41*\n(dataproc.googleapis.com/WorkflowTemplate"Z\n\x1cListWorkflowTemplatesRequest\x12\x13\n\x06parent\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 
\x01(\t"\x86\x01\n\x1dListWorkflowTemplatesResponse\x12G\n\ttemplates\x18\x01 \x03(\x0b\x32/.google.cloud.dataproc.v1beta2.WorkflowTemplateB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03"p\n\x1d\x44\x65leteWorkflowTemplateRequest\x12>\n\x04name\x18\x01 \x01(\tB0\xe0\x41\x02\xfa\x41*\n(dataproc.googleapis.com/WorkflowTemplate\x12\x0f\n\x07version\x18\x02 \x01(\x05\x32\xe9\x11\n\x17WorkflowTemplateService\x12\xb0\x02\n\x16\x43reateWorkflowTemplate\x12<.google.cloud.dataproc.v1beta2.CreateWorkflowTemplateRequest\x1a/.google.cloud.dataproc.v1beta2.WorkflowTemplate"\xa6\x01\x82\xd3\xe4\x93\x02\x8c\x01"8/v1beta2/{parent=projects/*/regions/*}/workflowTemplates:\x08templateZF":/v1beta2/{parent=projects/*/locations/*}/workflowTemplates:\x08template\xda\x41\x10parent, template\x12\x89\x02\n\x13GetWorkflowTemplate\x12\x39.google.cloud.dataproc.v1beta2.GetWorkflowTemplateRequest\x1a/.google.cloud.dataproc.v1beta2.WorkflowTemplate"\x85\x01\x82\xd3\xe4\x93\x02x\x12\x38/v1beta2/{name=projects/*/regions/*/workflowTemplates/*}Z<\x12:/v1beta2/{name=projects/*/locations/*/workflowTemplates/*}\xda\x41\x04name\x12\xe5\x02\n\x1bInstantiateWorkflowTemplate\x12\x41.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest\x1a\x1d.google.longrunning.Operation"\xe3\x01\x82\xd3\xe4\x93\x02\x96\x01"D/v1beta2/{name=projects/*/regions/*/workflowTemplates/*}:instantiate:\x01*ZK"F/v1beta2/{name=projects/*/locations/*/workflowTemplates/*}:instantiate:\x01*\xda\x41\x04name\xda\x41\x10name, parameters\xca\x41)\n\x15google.protobuf.Empty\x12\x10WorkflowMetadata\x12\x84\x03\n!InstantiateInlineWorkflowTemplate\x12G.google.cloud.dataproc.v1beta2.InstantiateInlineWorkflowTemplateRequest\x1a\x1d.google.longrunning.Operation"\xf6\x01\x82\xd3\xe4\x93\x02\xb0\x01"L/v1beta2/{parent=projects/*/locations/*}/workflowTemplates:instantiateInline:\x08templateZV"J/v1beta2/{parent=projects/*/regions/*}/workflowTemplates:instantiateInline:\x08template\xda\x41\x10parent, 
template\xca\x41)\n\x15google.protobuf.Empty\x12\x10WorkflowMetadata\x12\xba\x02\n\x16UpdateWorkflowTemplate\x12<.google.cloud.dataproc.v1beta2.UpdateWorkflowTemplateRequest\x1a/.google.cloud.dataproc.v1beta2.WorkflowTemplate"\xb0\x01\x82\xd3\xe4\x93\x02\x9e\x01\x1a\x41/v1beta2/{template.name=projects/*/regions/*/workflowTemplates/*}:\x08templateZO\x1a\x43/v1beta2/{template.name=projects/*/locations/*/workflowTemplates/*}:\x08template\xda\x41\x08template\x12\x9c\x02\n\x15ListWorkflowTemplates\x12;.google.cloud.dataproc.v1beta2.ListWorkflowTemplatesRequest\x1a<.google.cloud.dataproc.v1beta2.ListWorkflowTemplatesResponse"\x87\x01\x82\xd3\xe4\x93\x02x\x12\x38/v1beta2/{parent=projects/*/regions/*}/workflowTemplatesZ<\x12:/v1beta2/{parent=projects/*/locations/*}/workflowTemplates\xda\x41\x06parent\x12\xf6\x01\n\x16\x44\x65leteWorkflowTemplate\x12<.google.cloud.dataproc.v1beta2.DeleteWorkflowTemplateRequest\x1a\x16.google.protobuf.Empty"\x85\x01\x82\xd3\xe4\x93\x02x*8/v1beta2/{name=projects/*/regions/*/workflowTemplates/*}Z<*:/v1beta2/{name=projects/*/locations/*/workflowTemplates/*}\xda\x41\x04name\x1aK\xca\x41\x17\x64\x61taproc.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB\x84\x01\n!com.google.cloud.dataproc.v1beta2B\x16WorkflowTemplatesProtoP\x01ZEgoogle.golang.org/genproto/googleapis/cloud/dataproc/v1beta2;dataprocb\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, + google_dot_api_dot_resource__pb2.DESCRIPTOR, google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_clusters__pb2.DESCRIPTOR, google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_jobs__pb2.DESCRIPTOR, google_dot_longrunning_dot_operations__pb2.DESCRIPTOR, @@ -71,8 +77,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=3046, - serialized_end=3102, + serialized_start=3421, + serialized_end=3477, ) 
_sym_db.RegisterEnumDescriptor(_WORKFLOWMETADATA_STATE) @@ -107,8 +113,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=3418, - serialized_end=3525, + serialized_start=3838, + serialized_end=3945, ) _sym_db.RegisterEnumDescriptor(_WORKFLOWNODE_NODESTATE) @@ -165,8 +171,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=767, - serialized_end=812, + serialized_start=887, + serialized_end=932, ) _WORKFLOWTEMPLATE = _descriptor.Descriptor( @@ -191,7 +197,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -209,7 +215,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -227,7 +233,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -245,7 +251,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -263,7 +269,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -281,7 +287,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -335,20 +341,22 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], extensions=[], nested_types=[_WORKFLOWTEMPLATE_LABELSENTRY], enum_types=[], - serialized_options=None, + serialized_options=_b( + 
"\352A\306\001\n(dataproc.googleapis.com/WorkflowTemplate\022Iprojects/{project}/regions/{region}/workflowTemplates/{workflow_template}\022Mprojects/{project}/locations/{location}/workflowTemplates/{workflow_template} \001" + ), is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=325, - serialized_end=812, + serialized_start=410, + serialized_end=1137, ) @@ -412,8 +420,8 @@ fields=[], ) ], - serialized_start=815, - serialized_end=1005, + serialized_start=1140, + serialized_end=1330, ) @@ -469,8 +477,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=767, - serialized_end=812, + serialized_start=887, + serialized_end=932, ) _MANAGEDCLUSTER = _descriptor.Descriptor( @@ -543,8 +551,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1008, - serialized_end=1230, + serialized_start=1333, + serialized_end=1555, ) @@ -600,8 +608,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1357, - serialized_end=1409, + serialized_start=1682, + serialized_end=1734, ) _CLUSTERSELECTOR = _descriptor.Descriptor( @@ -656,8 +664,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1233, - serialized_end=1409, + serialized_start=1558, + serialized_end=1734, ) @@ -713,8 +721,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=767, - serialized_end=812, + serialized_start=887, + serialized_end=932, ) _ORDEREDJOB = _descriptor.Descriptor( @@ -921,8 +929,8 @@ fields=[], ) ], - serialized_start=1412, - serialized_end=2047, + serialized_start=1737, + serialized_end=2372, ) @@ -1014,8 +1022,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2050, - serialized_end=2192, + serialized_start=2375, + serialized_end=2517, ) @@ -1079,8 +1087,8 @@ fields=[], ) ], - serialized_start=2195, - serialized_end=2366, + serialized_start=2520, + serialized_end=2691, ) @@ -1118,8 +1126,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - 
serialized_start=2368, - serialized_end=2402, + serialized_start=2693, + serialized_end=2727, ) @@ -1157,8 +1165,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2404, - serialized_end=2437, + serialized_start=2729, + serialized_end=2762, ) @@ -1214,8 +1222,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2995, - serialized_end=3044, + serialized_start=3370, + serialized_end=3419, ) _WORKFLOWMETADATA = _descriptor.Descriptor( @@ -1240,7 +1248,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1258,7 +1266,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1276,7 +1284,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1294,7 +1302,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1312,7 +1320,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1330,7 +1338,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1348,7 +1356,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1384,7 +1392,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + 
serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1402,7 +1410,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1420,7 +1428,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -1432,8 +1440,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2440, - serialized_end=3102, + serialized_start=2765, + serialized_end=3477, ) @@ -1459,7 +1467,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1477,7 +1485,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1495,7 +1503,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -1507,8 +1515,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3104, - serialized_end=3173, + serialized_start=3479, + serialized_end=3563, ) @@ -1534,7 +1542,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ) ], @@ -1546,8 +1554,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3175, - serialized_end=3250, + serialized_start=3565, + serialized_end=3645, ) @@ -1573,7 +1581,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1591,7 +1599,7 @@ containing_type=None, is_extension=False, extension_scope=None, 
- serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1609,7 +1617,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1627,7 +1635,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1645,7 +1653,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -1657,8 +1665,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3253, - serialized_end=3525, + serialized_start=3648, + serialized_end=3945, ) @@ -1684,7 +1692,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A*\022(dataproc.googleapis.com/WorkflowTemplate" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1702,7 +1712,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -1714,8 +1724,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3527, - serialized_end=3641, + serialized_start=3948, + serialized_end=4117, ) @@ -1741,7 +1751,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A*\n(dataproc.googleapis.com/WorkflowTemplate" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1771,8 +1783,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3643, - serialized_end=3702, + serialized_start=4119, + serialized_end=4228, ) @@ -1828,8 +1840,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2995, - serialized_end=3044, 
+ serialized_start=3370, + serialized_end=3419, ) _INSTANTIATEWORKFLOWTEMPLATEREQUEST = _descriptor.Descriptor( @@ -1854,7 +1866,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A*\n(dataproc.googleapis.com/WorkflowTemplate" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1938,8 +1952,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3705, - serialized_end=3971, + serialized_start=4231, + serialized_end=4547, ) @@ -1965,7 +1979,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A*\022(dataproc.googleapis.com/WorkflowTemplate" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1983,7 +1999,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2031,8 +2047,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3974, - serialized_end=4140, + serialized_start=4550, + serialized_end=4771, ) @@ -2058,7 +2074,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A*\n(dataproc.googleapis.com/WorkflowTemplate" + ), file=DESCRIPTOR, ) ], @@ -2070,8 +2088,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4142, - serialized_end=4240, + serialized_start=4774, + serialized_end=4922, ) @@ -2097,7 +2115,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2145,8 +2163,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4242, - serialized_end=4327, + serialized_start=4924, + serialized_end=5014, ) @@ -2172,7 +2190,7 @@ containing_type=None, is_extension=False, 
extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2190,7 +2208,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -2202,8 +2220,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4329, - serialized_end=4453, + serialized_start=5017, + serialized_end=5151, ) @@ -2229,7 +2247,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A*\n(dataproc.googleapis.com/WorkflowTemplate" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2259,8 +2279,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4455, - serialized_end=4517, + serialized_start=5153, + serialized_end=5265, ) _WORKFLOWTEMPLATE_LABELSENTRY.containing_type = _WORKFLOWTEMPLATE @@ -2489,10 +2509,16 @@ Cannot begin or end with underscore or hyphen. Must consist of between 3 and 50 characters. . name: - Output only. The "resource name" of the template, as described - in https://cloud.google.com/apis/design/resource\_names of the - form ``projects/{project_id}/regions/{region}/workflowTemplate - s/{template_id}`` + Output only. The resource name of the workflow template, as + described in + https://cloud.google.com/apis/design/resource\_names. - For + ``projects.regions.workflowTemplates``, the resource name of + the template has the following format: ``projects/{proje + ct_id}/regions/{region}/workflowTemplates/{template_id}`` - + For ``projects.locations.workflowTemplates``, the resource + name of the template has the following format: ``project + s/{project_id}/locations/{location}/workflowTemplates/{templat + e_id}`` version: Optional. Used to perform a consistent read-modify-write. This field should be left blank for a @@ -2839,7 +2865,16 @@ Attributes: template: - Output only. 
The "resource name" of the template. + Output only. The resource name of the workflow template as + described in + https://cloud.google.com/apis/design/resource\_names. - For + ``projects.regions.workflowTemplates``, the resource name of + the template has the following format: ``projects/{proje + ct_id}/regions/{region}/workflowTemplates/{template_id}`` - + For ``projects.locations.workflowTemplates``, the resource + name of the template has the following format: ``project + s/{project_id}/locations/{location}/workflowTemplates/{templat + e_id}`` version: Output only. The version of template at the time of workflow instantiation. @@ -2947,9 +2982,15 @@ Attributes: parent: - Required. The "resource name" of the region, as described in - https://cloud.google.com/apis/design/resource\_names of the - form ``projects/{project_id}/regions/{region}`` + Required. The resource name of the region or location, as + described in + https://cloud.google.com/apis/design/resource\_names. - For + ``projects.regions.workflowTemplates,create``, the resource + name of the region has the following format: + ``projects/{project_id}/regions/{region}`` - For + ``projects.locations.workflowTemplates.create``, the resource + name of the location has the following format: + ``projects/{project_id}/locations/{location}`` template: Required. The Dataproc workflow template to create. """, @@ -2969,14 +3010,19 @@ Attributes: name: - Required. The "resource name" of the workflow template, as + Required. The resource name of the workflow template, as described in - https://cloud.google.com/apis/design/resource\_names of the - form ``projects/{project_id}/regions/{region}/workflowTemplate - s/{template_id}`` + https://cloud.google.com/apis/design/resource\_names. 
- For + ``projects.regions.workflowTemplates.get``, the resource name + of the template has the following format: ``projects/{pr + oject_id}/regions/{region}/workflowTemplates/{template_id}`` + - For ``projects.locations.workflowTemplates.get``, the + resource name of the template has the following format: + ``projects/{project_id}/locations/{location}/workflowTemplates + /{template_id}`` version: Optional. The version of workflow template to retrieve. Only - previously instatiated versions can be retrieved. If + previously instantiated versions can be retrieved. If unspecified, retrieves the current version. """, # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.GetWorkflowTemplateRequest) @@ -3004,11 +3050,17 @@ Attributes: name: - Required. The "resource name" of the workflow template, as + Required. The resource name of the workflow template, as described in - https://cloud.google.com/apis/design/resource\_names of the - form ``projects/{project_id}/regions/{region}/workflowTemplate - s/{template_id}`` + https://cloud.google.com/apis/design/resource\_names. - For + ``projects.regions.workflowTemplates.instantiate``, the + resource name of the template has the following format: + ``projects/{project_id}/regions/{region}/workflowTemplates/{te + mplate_id}`` - For + ``projects.locations.workflowTemplates.instantiate``, the + resource name of the template has the following format: ``p + rojects/{project_id}/locations/{location}/workflowTemplates/{t + emplate_id}`` version: Optional. The version of workflow template to instantiate. If specified, the workflow will be instantiated only if the @@ -3048,10 +3100,15 @@ Attributes: parent: - Required. The "resource name" of the workflow template region, - as described in - https://cloud.google.com/apis/design/resource\_names of the - form ``projects/{project_id}/regions/{region}`` + Required. 
The resource name of the region or location, as + described in + https://cloud.google.com/apis/design/resource\_names. - For + ``projects.regions.workflowTemplates,instantiateinline``, the + resource name of the region has the following format: + ``projects/{project_id}/regions/{region}`` - For + ``projects.locations.workflowTemplates.instantiateinline``, + the resource name of the location has the following format: + ``projects/{project_id}/locations/{location}`` template: Required. The workflow template to instantiate. instance_id: @@ -3101,9 +3158,15 @@ Attributes: parent: - Required. The "resource name" of the region, as described in - https://cloud.google.com/apis/design/resource\_names of the - form ``projects/{project_id}/regions/{region}`` + Required. The resource name of the region or location, as + described in + https://cloud.google.com/apis/design/resource\_names. - For + ``projects.regions.workflowTemplates,list``, the resource name + of the region has the following format: + ``projects/{project_id}/regions/{region}`` - For + ``projects.locations.workflowTemplates.list``, the resource + name of the location has the following format: + ``projects/{project_id}/locations/{location}`` page_size: Optional. The maximum number of results to return in each response. @@ -3152,11 +3215,17 @@ Attributes: name: - Required. The "resource name" of the workflow template, as + Required. The resource name of the workflow template, as described in - https://cloud.google.com/apis/design/resource\_names of the - form ``projects/{project_id}/regions/{region}/workflowTemplate - s/{template_id}`` + https://cloud.google.com/apis/design/resource\_names. 
- For + ``projects.regions.workflowTemplates.delete``, the resource + name of the template has the following format: ``project + s/{project_id}/regions/{region}/workflowTemplates/{template_id + }`` - For + ``projects.locations.workflowTemplates.instantiate``, the + resource name of the template has the following format: ``p + rojects/{project_id}/locations/{location}/workflowTemplates/{t + emplate_id}`` version: Optional. The version of workflow template to delete. If specified, will only delete the template if the current server @@ -3170,21 +3239,61 @@ DESCRIPTOR._options = None _WORKFLOWTEMPLATE_LABELSENTRY._options = None +_WORKFLOWTEMPLATE.fields_by_name["id"]._options = None +_WORKFLOWTEMPLATE.fields_by_name["name"]._options = None +_WORKFLOWTEMPLATE.fields_by_name["version"]._options = None +_WORKFLOWTEMPLATE.fields_by_name["create_time"]._options = None +_WORKFLOWTEMPLATE.fields_by_name["update_time"]._options = None +_WORKFLOWTEMPLATE.fields_by_name["labels"]._options = None +_WORKFLOWTEMPLATE.fields_by_name["parameters"]._options = None +_WORKFLOWTEMPLATE._options = None _MANAGEDCLUSTER_LABELSENTRY._options = None _CLUSTERSELECTOR_CLUSTERLABELSENTRY._options = None _ORDEREDJOB_LABELSENTRY._options = None _WORKFLOWMETADATA_PARAMETERSENTRY._options = None +_WORKFLOWMETADATA.fields_by_name["template"]._options = None +_WORKFLOWMETADATA.fields_by_name["version"]._options = None +_WORKFLOWMETADATA.fields_by_name["create_cluster"]._options = None +_WORKFLOWMETADATA.fields_by_name["graph"]._options = None +_WORKFLOWMETADATA.fields_by_name["delete_cluster"]._options = None +_WORKFLOWMETADATA.fields_by_name["state"]._options = None +_WORKFLOWMETADATA.fields_by_name["cluster_name"]._options = None +_WORKFLOWMETADATA.fields_by_name["start_time"]._options = None +_WORKFLOWMETADATA.fields_by_name["end_time"]._options = None +_WORKFLOWMETADATA.fields_by_name["cluster_uuid"]._options = None +_CLUSTEROPERATION.fields_by_name["operation_id"]._options = None 
+_CLUSTEROPERATION.fields_by_name["error"]._options = None +_CLUSTEROPERATION.fields_by_name["done"]._options = None +_WORKFLOWGRAPH.fields_by_name["nodes"]._options = None +_WORKFLOWNODE.fields_by_name["step_id"]._options = None +_WORKFLOWNODE.fields_by_name["prerequisite_step_ids"]._options = None +_WORKFLOWNODE.fields_by_name["job_id"]._options = None +_WORKFLOWNODE.fields_by_name["state"]._options = None +_WORKFLOWNODE.fields_by_name["error"]._options = None +_CREATEWORKFLOWTEMPLATEREQUEST.fields_by_name["parent"]._options = None +_CREATEWORKFLOWTEMPLATEREQUEST.fields_by_name["template"]._options = None +_GETWORKFLOWTEMPLATEREQUEST.fields_by_name["name"]._options = None _INSTANTIATEWORKFLOWTEMPLATEREQUEST_PARAMETERSENTRY._options = None +_INSTANTIATEWORKFLOWTEMPLATEREQUEST.fields_by_name["name"]._options = None _INSTANTIATEWORKFLOWTEMPLATEREQUEST.fields_by_name["instance_id"]._options = None +_INSTANTIATEINLINEWORKFLOWTEMPLATEREQUEST.fields_by_name["parent"]._options = None +_INSTANTIATEINLINEWORKFLOWTEMPLATEREQUEST.fields_by_name["template"]._options = None +_UPDATEWORKFLOWTEMPLATEREQUEST.fields_by_name["template"]._options = None +_LISTWORKFLOWTEMPLATESREQUEST.fields_by_name["parent"]._options = None +_LISTWORKFLOWTEMPLATESRESPONSE.fields_by_name["templates"]._options = None +_LISTWORKFLOWTEMPLATESRESPONSE.fields_by_name["next_page_token"]._options = None +_DELETEWORKFLOWTEMPLATEREQUEST.fields_by_name["name"]._options = None _WORKFLOWTEMPLATESERVICE = _descriptor.ServiceDescriptor( name="WorkflowTemplateService", full_name="google.cloud.dataproc.v1beta2.WorkflowTemplateService", file=DESCRIPTOR, index=0, - serialized_options=None, - serialized_start=4520, - serialized_end=6535, + serialized_options=_b( + "\312A\027dataproc.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" + ), + serialized_start=5268, + serialized_end=7549, methods=[ _descriptor.MethodDescriptor( name="CreateWorkflowTemplate", @@ -3194,7 +3303,7 @@ 
input_type=_CREATEWORKFLOWTEMPLATEREQUEST, output_type=_WORKFLOWTEMPLATE, serialized_options=_b( - '\202\323\344\223\002\214\001"8/v1beta2/{parent=projects/*/regions/*}/workflowTemplates:\010templateZF":/v1beta2/{parent=projects/*/locations/*}/workflowTemplates:\010template' + '\202\323\344\223\002\214\001"8/v1beta2/{parent=projects/*/regions/*}/workflowTemplates:\010templateZF":/v1beta2/{parent=projects/*/locations/*}/workflowTemplates:\010template\332A\020parent, template' ), ), _descriptor.MethodDescriptor( @@ -3205,7 +3314,7 @@ input_type=_GETWORKFLOWTEMPLATEREQUEST, output_type=_WORKFLOWTEMPLATE, serialized_options=_b( - "\202\323\344\223\002x\0228/v1beta2/{name=projects/*/regions/*/workflowTemplates/*}Z<\022:/v1beta2/{name=projects/*/locations/*/workflowTemplates/*}" + "\202\323\344\223\002x\0228/v1beta2/{name=projects/*/regions/*/workflowTemplates/*}Z<\022:/v1beta2/{name=projects/*/locations/*/workflowTemplates/*}\332A\004name" ), ), _descriptor.MethodDescriptor( @@ -3216,7 +3325,7 @@ input_type=_INSTANTIATEWORKFLOWTEMPLATEREQUEST, output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, serialized_options=_b( - '\202\323\344\223\002\226\001"D/v1beta2/{name=projects/*/regions/*/workflowTemplates/*}:instantiate:\001*ZK"F/v1beta2/{name=projects/*/locations/*/workflowTemplates/*}:instantiate:\001*' + '\202\323\344\223\002\226\001"D/v1beta2/{name=projects/*/regions/*/workflowTemplates/*}:instantiate:\001*ZK"F/v1beta2/{name=projects/*/locations/*/workflowTemplates/*}:instantiate:\001*\332A\004name\332A\020name, parameters\312A)\n\025google.protobuf.Empty\022\020WorkflowMetadata' ), ), _descriptor.MethodDescriptor( @@ -3227,7 +3336,7 @@ input_type=_INSTANTIATEINLINEWORKFLOWTEMPLATEREQUEST, output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, serialized_options=_b( - 
'\202\323\344\223\002\260\001"L/v1beta2/{parent=projects/*/locations/*}/workflowTemplates:instantiateInline:\010templateZV"J/v1beta2/{parent=projects/*/regions/*}/workflowTemplates:instantiateInline:\010template' + '\202\323\344\223\002\260\001"L/v1beta2/{parent=projects/*/locations/*}/workflowTemplates:instantiateInline:\010templateZV"J/v1beta2/{parent=projects/*/regions/*}/workflowTemplates:instantiateInline:\010template\332A\020parent, template\312A)\n\025google.protobuf.Empty\022\020WorkflowMetadata' ), ), _descriptor.MethodDescriptor( @@ -3238,7 +3347,7 @@ input_type=_UPDATEWORKFLOWTEMPLATEREQUEST, output_type=_WORKFLOWTEMPLATE, serialized_options=_b( - "\202\323\344\223\002\236\001\032A/v1beta2/{template.name=projects/*/regions/*/workflowTemplates/*}:\010templateZO\032C/v1beta2/{template.name=projects/*/locations/*/workflowTemplates/*}:\010template" + "\202\323\344\223\002\236\001\032A/v1beta2/{template.name=projects/*/regions/*/workflowTemplates/*}:\010templateZO\032C/v1beta2/{template.name=projects/*/locations/*/workflowTemplates/*}:\010template\332A\010template" ), ), _descriptor.MethodDescriptor( @@ -3249,7 +3358,7 @@ input_type=_LISTWORKFLOWTEMPLATESREQUEST, output_type=_LISTWORKFLOWTEMPLATESRESPONSE, serialized_options=_b( - "\202\323\344\223\002x\0228/v1beta2/{parent=projects/*/regions/*}/workflowTemplatesZ<\022:/v1beta2/{parent=projects/*/locations/*}/workflowTemplates" + "\202\323\344\223\002x\0228/v1beta2/{parent=projects/*/regions/*}/workflowTemplatesZ<\022:/v1beta2/{parent=projects/*/locations/*}/workflowTemplates\332A\006parent" ), ), _descriptor.MethodDescriptor( @@ -3260,7 +3369,7 @@ input_type=_DELETEWORKFLOWTEMPLATEREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, serialized_options=_b( - "\202\323\344\223\002x*8/v1beta2/{name=projects/*/regions/*/workflowTemplates/*}Z<*:/v1beta2/{name=projects/*/locations/*/workflowTemplates/*}" + 
"\202\323\344\223\002x*8/v1beta2/{name=projects/*/regions/*/workflowTemplates/*}Z<*:/v1beta2/{name=projects/*/locations/*/workflowTemplates/*}\332A\004name" ), ), ], diff --git a/dataproc/google/cloud/dataproc_v1beta2/proto/workflow_templates_pb2_grpc.py b/dataproc/google/cloud/dataproc_v1beta2/proto/workflow_templates_pb2_grpc.py index 012152a68d1a..e05372f50416 100644 --- a/dataproc/google/cloud/dataproc_v1beta2/proto/workflow_templates_pb2_grpc.py +++ b/dataproc/google/cloud/dataproc_v1beta2/proto/workflow_templates_pb2_grpc.py @@ -94,7 +94,9 @@ def InstantiateWorkflowTemplate(self, request, context): clusters to be deleted. The [Operation.metadata][google.longrunning.Operation.metadata] will be - [WorkflowMetadata][google.cloud.dataproc.v1beta2.WorkflowMetadata]. + [WorkflowMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#workflowmetadata). + Also see [Using + WorkflowMetadata](/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata). On successful completion, [Operation.response][google.longrunning.Operation.response] will be @@ -122,7 +124,9 @@ def InstantiateInlineWorkflowTemplate(self, request, context): clusters to be deleted. The [Operation.metadata][google.longrunning.Operation.metadata] will be - [WorkflowMetadata][google.cloud.dataproc.v1beta2.WorkflowMetadata]. + [WorkflowMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#workflowmetadata). + Also see [Using + WorkflowMetadata](/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata). 
On successful completion, [Operation.response][google.longrunning.Operation.response] will be diff --git a/dataproc/synth.metadata b/dataproc/synth.metadata index 99c6b1a0eb60..dfd138d546ab 100644 --- a/dataproc/synth.metadata +++ b/dataproc/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-08-06T12:21:27.830464Z", + "updateTime": "2019-10-15T12:20:15.152154Z", "sources": [ { "generator": { "name": "artman", - "version": "0.32.1", - "dockerImage": "googleapis/artman@sha256:a684d40ba9a4e15946f5f2ca6b4bd9fe301192f522e9de4fff622118775f309b" + "version": "0.39.0", + "dockerImage": "googleapis/artman@sha256:72554d0b3bdc0b4ac7d6726a6a606c00c14b454339037ed86be94574fb05d9f3" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "e699b0cba64ffddfae39633417180f1f65875896", - "internalRef": "261759677" + "sha": "82e14b22669d5748d7a0922634159794ce0bf796", + "internalRef": "274692507" } }, { diff --git a/datastore/CHANGELOG.md b/datastore/CHANGELOG.md index 621e31cd680b..2dc7b6c7cc86 100644 --- a/datastore/CHANGELOG.md +++ b/datastore/CHANGELOG.md @@ -4,6 +4,27 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +## 1.10.0 + +10-10-2019 12:20 PDT + + +### Implementation Changes +- Remove send / receive message size limit (via synth). ([#8952](https://github.com/googleapis/google-cloud-python/pull/8952)) + +### New Features +- Add `client_options` to constructors for manual clients. ([#9055](https://github.com/googleapis/google-cloud-python/pull/9055)) + +### Dependencies +- Pin `google-cloud-core >= 1.0.3, < 2.0.0dev`. ([#9055](https://github.com/googleapis/google-cloud-python/pull/9055)) + +### Documentation +- Fix intersphinx reference to requests. ([#9294](https://github.com/googleapis/google-cloud-python/pull/9294)) +- Remove CI for gh-pages, use googleapis.dev for `api_core` refs. ([#9085](https://github.com/googleapis/google-cloud-python/pull/9085)) +- Remove compatability badges from READMEs. 
([#9035](https://github.com/googleapis/google-cloud-python/pull/9035)) +- Update docs for building datastore indexes. ([#8707](https://github.com/googleapis/google-cloud-python/pull/8707)) +- Update intersphinx mapping for requests. ([#8805](https://github.com/googleapis/google-cloud-python/pull/8805)) + ## 1.9.0 07-24-2019 16:04 PDT diff --git a/datastore/README.rst b/datastore/README.rst index e613092a58bf..bb685f04f9c9 100644 --- a/datastore/README.rst +++ b/datastore/README.rst @@ -105,7 +105,7 @@ Example Usage Next Steps ~~~~~~~~~~ -- Read the `Client Library Documentation`_ for Google Cloud Datastore API +- Read the `Client Library Documentation`_ for Google Cloud Datastore API to see other available methods on the client. - Read the `Product documentation`_ to learn more about the product and see How-to Guides. diff --git a/datastore/docs/conf.py b/datastore/docs/conf.py index a4c30084718e..64f9e995acf3 100644 --- a/datastore/docs/conf.py +++ b/datastore/docs/conf.py @@ -339,7 +339,7 @@ "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://2.python-requests.org/en/master/", None), + "requests": ("https://requests.kennethreitz.org/en/stable/", None), "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } diff --git a/datastore/google/cloud/datastore/client.py b/datastore/google/cloud/datastore/client.py index df9ce33a0bdf..69bbc0342eaf 100644 --- a/datastore/google/cloud/datastore/client.py +++ b/datastore/google/cloud/datastore/client.py @@ -15,6 +15,7 @@ import os +import google.api_core.client_options from google.cloud._helpers import _LocalStack from google.cloud._helpers import _determine_default_project as _base_default_project from google.cloud.client import ClientWithProject @@ -201,6 +202,11 @@ class 
Client(ClientWithProject): you only need to set this if you're developing your own library or partner tool. + :type client_options: :class:`~google.api_core.client_options.ClientOptions` + or :class:`dict` + :param client_options: (Optional) Client options used to set user options on the + client. API Endpoint should be set through client_options. + :type _http: :class:`~requests.Session` :param _http: (Optional) HTTP object to make requests. Can be any object that defines ``request()`` with the same interface as @@ -228,6 +234,7 @@ def __init__( namespace=None, credentials=None, client_info=_CLIENT_INFO, + client_options=None, _http=None, _use_grpc=None, ): @@ -236,6 +243,7 @@ def __init__( ) self.namespace = namespace self._client_info = client_info + self._client_options = client_options self._batch_stack = _LocalStack() self._datastore_api_internal = None if _use_grpc is None: @@ -246,7 +254,15 @@ def __init__( host = os.environ[GCD_HOST] self._base_url = "http://" + host except KeyError: - self._base_url = _DATASTORE_BASE_URL + api_endpoint = _DATASTORE_BASE_URL + if client_options: + if type(client_options) == dict: + client_options = google.api_core.client_options.from_dict( + client_options + ) + if client_options.api_endpoint: + api_endpoint = client_options.api_endpoint + self._base_url = api_endpoint @staticmethod def _determine_default(project): diff --git a/datastore/setup.py b/datastore/setup.py index 7414c0ed29da..4fadd33db7ce 100644 --- a/datastore/setup.py +++ b/datastore/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-datastore" description = "Google Cloud Datastore API client library" -version = "1.9.0" +version = "1.10.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' @@ -30,7 +30,7 @@ release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "google-api-core[grpc] >= 1.14.0, < 2.0.0dev", - "google-cloud-core >= 1.0.0, < 2.0dev", + "google-cloud-core >= 1.0.3, < 2.0dev", ] extras 
= {} diff --git a/datastore/tests/unit/test_client.py b/datastore/tests/unit/test_client.py index 171a93eda4db..5a7448fc8894 100644 --- a/datastore/tests/unit/test_client.py +++ b/datastore/tests/unit/test_client.py @@ -128,6 +128,7 @@ def _make_one( namespace=None, credentials=None, client_info=None, + client_options=None, _http=None, _use_grpc=None, ): @@ -136,6 +137,7 @@ def _make_one( namespace=namespace, credentials=credentials, client_info=client_info, + client_options=client_options, _http=_http, _use_grpc=_use_grpc, ) @@ -172,6 +174,7 @@ def test_constructor_w_implicit_inputs(self): self.assertIs(client._credentials, creds) self.assertIs(client._client_info, _CLIENT_INFO) self.assertIsNone(client._http_internal) + self.assertIsNone(client._client_options) self.assertEqual(client.base_url, _DATASTORE_BASE_URL) self.assertIsNone(client.current_batch) @@ -181,18 +184,20 @@ def test_constructor_w_implicit_inputs(self): _determine_default_project.assert_called_once_with(None) def test_constructor_w_explicit_inputs(self): - from google.cloud.datastore.client import _DATASTORE_BASE_URL + from google.api_core.client_options import ClientOptions other = "other" namespace = "namespace" creds = _make_credentials() client_info = mock.Mock() + client_options = ClientOptions("endpoint") http = object() client = self._make_one( project=other, namespace=namespace, credentials=creds, client_info=client_info, + client_options=client_options, _http=http, ) self.assertEqual(client.project, other) @@ -201,8 +206,8 @@ def test_constructor_w_explicit_inputs(self): self.assertIs(client._client_info, client_info) self.assertIs(client._http_internal, http) self.assertIsNone(client.current_batch) + self.assertIs(client._base_url, "endpoint") self.assertEqual(list(client._batch_stack), []) - self.assertEqual(client.base_url, _DATASTORE_BASE_URL) def test_constructor_use_grpc_default(self): import google.cloud.datastore.client as MUT @@ -243,12 +248,39 @@ def 
test_constructor_gcd_host(self): self.assertEqual(client.base_url, "http://" + host) def test_base_url_property(self): + from google.cloud.datastore.client import _DATASTORE_BASE_URL + from google.api_core.client_options import ClientOptions + alternate_url = "https://alias.example.com/" project = "PROJECT" creds = _make_credentials() http = object() + client_options = ClientOptions() - client = self._make_one(project=project, credentials=creds, _http=http) + client = self._make_one( + project=project, + credentials=creds, + _http=http, + client_options=client_options, + ) + self.assertEqual(client.base_url, _DATASTORE_BASE_URL) + client.base_url = alternate_url + self.assertEqual(client.base_url, alternate_url) + + def test_base_url_property_w_client_options(self): + alternate_url = "https://alias.example.com/" + project = "PROJECT" + creds = _make_credentials() + http = object() + client_options = {"api_endpoint": "endpoint"} + + client = self._make_one( + project=project, + credentials=creds, + _http=http, + client_options=client_options, + ) + self.assertEqual(client.base_url, "endpoint") client.base_url = alternate_url self.assertEqual(client.base_url, alternate_url) diff --git a/dlp/docs/conf.py b/dlp/docs/conf.py index 1385e6dd16f7..2d33b89f6062 100644 --- a/dlp/docs/conf.py +++ b/dlp/docs/conf.py @@ -338,7 +338,7 @@ "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://2.python-requests.org/en/master/", None), + "requests": ("https://requests.kennethreitz.org/en/master/", None), "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } diff --git a/dlp/noxfile.py b/dlp/noxfile.py index a2eefbb6765f..45ecc8fff050 100644 --- a/dlp/noxfile.py +++ b/dlp/noxfile.py @@ -118,11 +118,25 @@ def system(session): 
session.install("-e", "../test_utils/") session.install("-e", ".") + env = {} + # Additional setup for VPCSC system tests + if os.environ.get("GOOGLE_CLOUD_TESTS_IN_VPCSC", "false").lower() != "true": + # Unset PROJECT_ID, since VPCSC system tests expect this to be a project + # within the VPCSC perimeter. + env = { + "PROJECT_ID": "", + "GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT": os.environ.get( + "PROJECT_ID" + ), + } + # Run py.test against the system tests. if system_test_exists: - session.run("py.test", "--quiet", system_test_path, *session.posargs) + session.run("py.test", "--quiet", system_test_path, env=env, *session.posargs) if system_test_folder_exists: - session.run("py.test", "--quiet", system_test_folder_path, *session.posargs) + session.run( + "py.test", "--quiet", system_test_folder_path, env=env, *session.posargs + ) @nox.session(python="3.7") diff --git a/dlp/synth.metadata b/dlp/synth.metadata index 6ed9ce34fb47..aa33a4001070 100644 --- a/dlp/synth.metadata +++ b/dlp/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-08-06T12:23:29.991839Z", + "updateTime": "2019-10-05T12:21:54.232546Z", "sources": [ { "generator": { "name": "artman", - "version": "0.32.1", - "dockerImage": "googleapis/artman@sha256:a684d40ba9a4e15946f5f2ca6b4bd9fe301192f522e9de4fff622118775f309b" + "version": "0.38.0", + "dockerImage": "googleapis/artman@sha256:0d2f8d429110aeb8d82df6550ef4ede59d40df9062d260a1580fce688b0512bf" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "e699b0cba64ffddfae39633417180f1f65875896", - "internalRef": "261759677" + "sha": "ceb8e2fb12f048cc94caae532ef0b4cf026a78f3", + "internalRef": "272971705" } }, { diff --git a/dlp/synth.py b/dlp/synth.py index 9a50da31e917..4a0b8d801de9 100644 --- a/dlp/synth.py +++ b/dlp/synth.py @@ -253,6 +253,6 @@ # Add templated files # ---------------------------------------------------------------------------- templated_files = 
common.py_library(unit_cov_level=97, cov_level=100) -s.move(templated_files) +s.move(templated_files, excludes=['noxfile.py']) s.shell.run(["nox", "-s", "blacken"], hide_output=False) diff --git a/dlp/tests/system/gapic/v2/test_system_dlp_service_v2_vpcsc.py b/dlp/tests/system/gapic/v2/test_system_dlp_service_v2_vpcsc.py new file mode 100644 index 000000000000..f075044d00e1 --- /dev/null +++ b/dlp/tests/system/gapic/v2/test_system_dlp_service_v2_vpcsc.py @@ -0,0 +1,521 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import os +import pytest + +from google.cloud import dlp_v2 +from google.cloud.dlp_v2 import enums +from google.cloud.dlp_v2.proto import dlp_pb2 +from google.api_core import exceptions + +PROJECT_INSIDE = os.environ.get("PROJECT_ID", None) +PROJECT_OUTSIDE = os.environ.get( + "GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", None +) +IS_INSIDE_VPCSC = ( + os.environ.get("GOOGLE_CLOUD_TESTS_IN_VPCSC", "false").lower() == "true" +) + + +class TestSystemDlpService(object): + @staticmethod + def _is_rejected(call): + try: + responses = call() + except exceptions.PermissionDenied as e: + return e.message == "Request is prohibited by organization's policy" + except: + pass + return False + + @staticmethod + def _do_test(delayed_inside, delayed_outside): + if IS_INSIDE_VPCSC: + assert TestSystemDlpService._is_rejected(delayed_outside) + assert not (TestSystemDlpService._is_rejected(delayed_inside)) + else: + assert not (TestSystemDlpService._is_rejected(delayed_outside)) + assert TestSystemDlpService._is_rejected(delayed_inside) + + @pytest.mark.skipif( + not IS_INSIDE_VPCSC, + reason="This test requires a VPCSC and setting GOOGLE_CLOUD_TESTS_IN_VPCSC", + ) + @pytest.mark.skipif( + PROJECT_OUTSIDE is None, + reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", + ) + def test_inspect_content(self): + client = dlp_v2.DlpServiceClient() + name_inside = client.project_path(PROJECT_INSIDE) + delayed_inside = lambda: client.inspect_content(name_inside) + name_outside = client.project_path(PROJECT_OUTSIDE) + delayed_outside = lambda: client.inspect_content(name_outside) + TestSystemDlpService._do_test(delayed_inside, delayed_outside) + + @pytest.mark.skipif( + not IS_INSIDE_VPCSC, + reason="This test requires a VPCSC and setting GOOGLE_CLOUD_TESTS_IN_VPCSC", + ) + @pytest.mark.skipif( + PROJECT_OUTSIDE is None, + reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", + ) + def 
test_redact_image(self): + client = dlp_v2.DlpServiceClient() + name_inside = client.project_path(PROJECT_INSIDE) + delayed_inside = lambda: client.redact_image(name_inside) + name_outside = client.project_path(PROJECT_OUTSIDE) + delayed_outside = lambda: client.redact_image(name_outside) + TestSystemDlpService._do_test(delayed_inside, delayed_outside) + + @pytest.mark.skipif( + not IS_INSIDE_VPCSC, + reason="This test requires a VPCSC and setting GOOGLE_CLOUD_TESTS_IN_VPCSC", + ) + @pytest.mark.skipif( + PROJECT_OUTSIDE is None, + reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", + ) + def test_deidentify_content(self): + client = dlp_v2.DlpServiceClient() + name_inside = client.project_path(PROJECT_INSIDE) + delayed_inside = lambda: client.deidentify_content(name_inside) + name_outside = client.project_path(PROJECT_OUTSIDE) + delayed_outside = lambda: client.deidentify_content(name_outside) + TestSystemDlpService._do_test(delayed_inside, delayed_outside) + + @pytest.mark.skipif( + not IS_INSIDE_VPCSC, + reason="This test requires a VPCSC and setting GOOGLE_CLOUD_TESTS_IN_VPCSC", + ) + @pytest.mark.skipif( + PROJECT_OUTSIDE is None, + reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", + ) + def test_reidentify_content(self): + client = dlp_v2.DlpServiceClient() + name_inside = client.project_path(PROJECT_INSIDE) + delayed_inside = lambda: client.reidentify_content(name_inside) + name_outside = client.project_path(PROJECT_OUTSIDE) + delayed_outside = lambda: client.reidentify_content(name_outside) + TestSystemDlpService._do_test(delayed_inside, delayed_outside) + + @pytest.mark.skipif( + not IS_INSIDE_VPCSC, + reason="This test requires a VPCSC and setting GOOGLE_CLOUD_TESTS_IN_VPCSC", + ) + @pytest.mark.skipif( + PROJECT_OUTSIDE is None, + reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", + ) + def test_create_inspect_template(self): + client = 
dlp_v2.DlpServiceClient() + name_inside = client.project_path(PROJECT_INSIDE) + delayed_inside = lambda: client.create_inspect_template(name_inside) + name_outside = client.project_path(PROJECT_OUTSIDE) + delayed_outside = lambda: client.create_inspect_template(name_outside) + TestSystemDlpService._do_test(delayed_inside, delayed_outside) + + @pytest.mark.skipif( + not IS_INSIDE_VPCSC, + reason="This test requires a VPCSC and setting GOOGLE_CLOUD_TESTS_IN_VPCSC", + ) + @pytest.mark.skipif( + PROJECT_OUTSIDE is None, + reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", + ) + def test_update_inspect_template(self): + client = dlp_v2.DlpServiceClient() + name_inside = client.project_path(PROJECT_INSIDE) + delayed_inside = lambda: client.update_inspect_template(name_inside) + name_outside = client.project_path(PROJECT_OUTSIDE) + delayed_outside = lambda: client.update_inspect_template(name_outside) + TestSystemDlpService._do_test(delayed_inside, delayed_outside) + + @pytest.mark.skipif( + not IS_INSIDE_VPCSC, + reason="This test requires a VPCSC and setting GOOGLE_CLOUD_TESTS_IN_VPCSC", + ) + @pytest.mark.skipif( + PROJECT_OUTSIDE is None, + reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", + ) + def test_get_inspect_template(self): + client = dlp_v2.DlpServiceClient() + name_inside = client.project_path(PROJECT_INSIDE) + delayed_inside = lambda: client.get_inspect_template(name_inside) + name_outside = client.project_path(PROJECT_OUTSIDE) + delayed_outside = lambda: client.get_inspect_template(name_outside) + TestSystemDlpService._do_test(delayed_inside, delayed_outside) + + @pytest.mark.skip(reason="List tests are currently not supported") + @pytest.mark.skipif( + not IS_INSIDE_VPCSC, + reason="This test requires a VPCSC and setting GOOGLE_CLOUD_TESTS_IN_VPCSC", + ) + @pytest.mark.skipif( + PROJECT_OUTSIDE is None, + reason="Missing environment variable: 
GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", + ) + def test_list_inspect_templates(self): + client = dlp_v2.DlpServiceClient() + name_inside = client.project_path(PROJECT_INSIDE) + delayed_inside = lambda: client.list_inspect_templates(name_inside) + name_outside = client.project_path(PROJECT_OUTSIDE) + delayed_outside = lambda: client.list_inspect_templates(name_outside) + TestSystemDlpService._do_test(delayed_inside, delayed_outside) + + @pytest.mark.skipif( + not IS_INSIDE_VPCSC, + reason="This test requires a VPCSC and setting GOOGLE_CLOUD_TESTS_IN_VPCSC", + ) + @pytest.mark.skipif( + PROJECT_OUTSIDE is None, + reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", + ) + def test_delete_inspect_template(self): + client = dlp_v2.DlpServiceClient() + name_inside = client.project_path(PROJECT_INSIDE) + delayed_inside = lambda: client.delete_inspect_template(name_inside) + name_outside = client.project_path(PROJECT_OUTSIDE) + delayed_outside = lambda: client.delete_inspect_template(name_outside) + TestSystemDlpService._do_test(delayed_inside, delayed_outside) + + @pytest.mark.skipif( + not IS_INSIDE_VPCSC, + reason="This test requires a VPCSC and setting GOOGLE_CLOUD_TESTS_IN_VPCSC", + ) + @pytest.mark.skipif( + PROJECT_OUTSIDE is None, + reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", + ) + def test_create_deidentify_template(self): + client = dlp_v2.DlpServiceClient() + name_inside = client.project_path(PROJECT_INSIDE) + delayed_inside = lambda: client.create_deidentify_template(name_inside) + name_outside = client.project_path(PROJECT_OUTSIDE) + delayed_outside = lambda: client.create_deidentify_template(name_outside) + TestSystemDlpService._do_test(delayed_inside, delayed_outside) + + @pytest.mark.skipif( + not IS_INSIDE_VPCSC, + reason="This test requires a VPCSC and setting GOOGLE_CLOUD_TESTS_IN_VPCSC", + ) + @pytest.mark.skipif( + PROJECT_OUTSIDE is None, + reason="Missing 
environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", + ) + def test_update_deidentify_template(self): + client = dlp_v2.DlpServiceClient() + name_inside = client.project_path(PROJECT_INSIDE) + delayed_inside = lambda: client.update_deidentify_template(name_inside) + name_outside = client.project_path(PROJECT_OUTSIDE) + delayed_outside = lambda: client.update_deidentify_template(name_outside) + TestSystemDlpService._do_test(delayed_inside, delayed_outside) + + @pytest.mark.skipif( + not IS_INSIDE_VPCSC, + reason="This test requires a VPCSC and setting GOOGLE_CLOUD_TESTS_IN_VPCSC", + ) + @pytest.mark.skipif( + PROJECT_OUTSIDE is None, + reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", + ) + def test_get_deidentify_template(self): + client = dlp_v2.DlpServiceClient() + name_inside = client.project_path(PROJECT_INSIDE) + delayed_inside = lambda: client.get_deidentify_template(name_inside) + name_outside = client.project_path(PROJECT_OUTSIDE) + delayed_outside = lambda: client.get_deidentify_template(name_outside) + TestSystemDlpService._do_test(delayed_inside, delayed_outside) + + @pytest.mark.skip(reason="List tests are currently not supported") + @pytest.mark.skipif( + not IS_INSIDE_VPCSC, + reason="This test requires a VPCSC and setting GOOGLE_CLOUD_TESTS_IN_VPCSC", + ) + @pytest.mark.skipif( + PROJECT_OUTSIDE is None, + reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", + ) + def test_list_deidentify_templates(self): + client = dlp_v2.DlpServiceClient() + name_inside = client.project_path(PROJECT_INSIDE) + delayed_inside = lambda: client.list_deidentify_templates(name_inside) + name_outside = client.project_path(PROJECT_OUTSIDE) + delayed_outside = lambda: client.list_deidentify_templates(name_outside) + TestSystemDlpService._do_test(delayed_inside, delayed_outside) + + @pytest.mark.skipif( + not IS_INSIDE_VPCSC, + reason="This test requires a VPCSC and setting 
GOOGLE_CLOUD_TESTS_IN_VPCSC", + ) + @pytest.mark.skipif( + PROJECT_OUTSIDE is None, + reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", + ) + def test_delete_deidentify_template(self): + client = dlp_v2.DlpServiceClient() + name_inside = client.project_path(PROJECT_INSIDE) + delayed_inside = lambda: client.delete_deidentify_template(name_inside) + name_outside = client.project_path(PROJECT_OUTSIDE) + delayed_outside = lambda: client.delete_deidentify_template(name_outside) + TestSystemDlpService._do_test(delayed_inside, delayed_outside) + + @pytest.mark.skipif( + not IS_INSIDE_VPCSC, + reason="This test requires a VPCSC and setting GOOGLE_CLOUD_TESTS_IN_VPCSC", + ) + @pytest.mark.skipif( + PROJECT_OUTSIDE is None, + reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", + ) + def test_create_dlp_job(self): + client = dlp_v2.DlpServiceClient() + name_inside = client.project_path(PROJECT_INSIDE) + delayed_inside = lambda: client.create_dlp_job(name_inside) + name_outside = client.project_path(PROJECT_OUTSIDE) + delayed_outside = lambda: client.create_dlp_job(name_outside) + TestSystemDlpService._do_test(delayed_inside, delayed_outside) + + @pytest.mark.skip(reason="List tests are currently not supported") + @pytest.mark.skipif( + not IS_INSIDE_VPCSC, + reason="This test requires a VPCSC and setting GOOGLE_CLOUD_TESTS_IN_VPCSC", + ) + @pytest.mark.skipif( + PROJECT_OUTSIDE is None, + reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", + ) + def test_list_dlp_jobs(self): + client = dlp_v2.DlpServiceClient() + name_inside = client.project_path(PROJECT_INSIDE) + delayed_inside = lambda: client.list_dlp_jobs(name_inside) + name_outside = client.project_path(PROJECT_OUTSIDE) + delayed_outside = lambda: client.list_dlp_jobs(name_outside) + TestSystemDlpService._do_test(delayed_inside, delayed_outside) + + @pytest.mark.skipif( + not IS_INSIDE_VPCSC, + 
reason="This test requires a VPCSC and setting GOOGLE_CLOUD_TESTS_IN_VPCSC", + ) + @pytest.mark.skipif( + PROJECT_OUTSIDE is None, + reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", + ) + def test_get_dlp_job(self): + client = dlp_v2.DlpServiceClient() + name_inside = client.project_path(PROJECT_INSIDE) + delayed_inside = lambda: client.get_dlp_job(name_inside) + name_outside = client.project_path(PROJECT_OUTSIDE) + delayed_outside = lambda: client.get_dlp_job(name_outside) + TestSystemDlpService._do_test(delayed_inside, delayed_outside) + + @pytest.mark.skipif( + not IS_INSIDE_VPCSC, + reason="This test requires a VPCSC and setting GOOGLE_CLOUD_TESTS_IN_VPCSC", + ) + @pytest.mark.skipif( + PROJECT_OUTSIDE is None, + reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", + ) + def test_delete_dlp_job(self): + client = dlp_v2.DlpServiceClient() + name_inside = client.project_path(PROJECT_INSIDE) + delayed_inside = lambda: client.delete_dlp_job(name_inside) + name_outside = client.project_path(PROJECT_OUTSIDE) + delayed_outside = lambda: client.delete_dlp_job(name_outside) + TestSystemDlpService._do_test(delayed_inside, delayed_outside) + + @pytest.mark.skipif( + not IS_INSIDE_VPCSC, + reason="This test requires a VPCSC and setting GOOGLE_CLOUD_TESTS_IN_VPCSC", + ) + @pytest.mark.skipif( + PROJECT_OUTSIDE is None, + reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", + ) + def test_cancel_dlp_job(self): + client = dlp_v2.DlpServiceClient() + name_inside = client.project_path(PROJECT_INSIDE) + delayed_inside = lambda: client.cancel_dlp_job(name_inside) + name_outside = client.project_path(PROJECT_OUTSIDE) + delayed_outside = lambda: client.cancel_dlp_job(name_outside) + TestSystemDlpService._do_test(delayed_inside, delayed_outside) + + @pytest.mark.skip(reason="List tests are currently not supported") + @pytest.mark.skipif( + not IS_INSIDE_VPCSC, + 
reason="This test requires a VPCSC and setting GOOGLE_CLOUD_TESTS_IN_VPCSC", + ) + @pytest.mark.skipif( + PROJECT_OUTSIDE is None, + reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", + ) + def test_list_job_triggers(self): + client = dlp_v2.DlpServiceClient() + name_inside = client.project_path(PROJECT_INSIDE) + delayed_inside = lambda: client.list_job_triggers(name_inside) + name_outside = client.project_path(PROJECT_OUTSIDE) + delayed_outside = lambda: client.list_job_triggers(name_outside) + TestSystemDlpService._do_test(delayed_inside, delayed_outside) + + @pytest.mark.skipif( + not IS_INSIDE_VPCSC, + reason="This test requires a VPCSC and setting GOOGLE_CLOUD_TESTS_IN_VPCSC", + ) + @pytest.mark.skipif( + PROJECT_OUTSIDE is None, + reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", + ) + def test_get_job_trigger(self): + client = dlp_v2.DlpServiceClient() + name_inside = client.project_path(PROJECT_INSIDE) + delayed_inside = lambda: client.get_job_trigger(name_inside) + name_outside = client.project_path(PROJECT_OUTSIDE) + delayed_outside = lambda: client.get_job_trigger(name_outside) + TestSystemDlpService._do_test(delayed_inside, delayed_outside) + + @pytest.mark.skipif( + not IS_INSIDE_VPCSC, + reason="This test requires a VPCSC and setting GOOGLE_CLOUD_TESTS_IN_VPCSC", + ) + @pytest.mark.skipif( + PROJECT_OUTSIDE is None, + reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", + ) + def test_delete_job_trigger(self): + client = dlp_v2.DlpServiceClient() + name_inside = client.project_path(PROJECT_INSIDE) + delayed_inside = lambda: client.delete_job_trigger(name_inside) + name_outside = client.project_path(PROJECT_OUTSIDE) + delayed_outside = lambda: client.delete_job_trigger(name_outside) + TestSystemDlpService._do_test(delayed_inside, delayed_outside) + + @pytest.mark.skipif( + not IS_INSIDE_VPCSC, + reason="This test requires a VPCSC and 
setting GOOGLE_CLOUD_TESTS_IN_VPCSC", + ) + @pytest.mark.skipif( + PROJECT_OUTSIDE is None, + reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", + ) + def test_update_job_trigger(self): + client = dlp_v2.DlpServiceClient() + name_inside = client.project_path(PROJECT_INSIDE) + delayed_inside = lambda: client.update_job_trigger(name_inside) + name_outside = client.project_path(PROJECT_OUTSIDE) + delayed_outside = lambda: client.update_job_trigger(name_outside) + TestSystemDlpService._do_test(delayed_inside, delayed_outside) + + @pytest.mark.skipif( + not IS_INSIDE_VPCSC, + reason="This test requires a VPCSC and setting GOOGLE_CLOUD_TESTS_IN_VPCSC", + ) + @pytest.mark.skipif( + PROJECT_OUTSIDE is None, + reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", + ) + def test_create_job_trigger(self): + client = dlp_v2.DlpServiceClient() + name_inside = client.project_path(PROJECT_INSIDE) + delayed_inside = lambda: client.create_job_trigger(name_inside) + name_outside = client.project_path(PROJECT_OUTSIDE) + delayed_outside = lambda: client.create_job_trigger(name_outside) + TestSystemDlpService._do_test(delayed_inside, delayed_outside) + + @pytest.mark.skipif( + not IS_INSIDE_VPCSC, + reason="This test requires a VPCSC and setting GOOGLE_CLOUD_TESTS_IN_VPCSC", + ) + @pytest.mark.skipif( + PROJECT_OUTSIDE is None, + reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", + ) + def test_create_stored_info_type(self): + client = dlp_v2.DlpServiceClient() + name_inside = client.project_path(PROJECT_INSIDE) + delayed_inside = lambda: client.create_stored_info_type(name_inside) + name_outside = client.project_path(PROJECT_OUTSIDE) + delayed_outside = lambda: client.create_stored_info_type(name_outside) + TestSystemDlpService._do_test(delayed_inside, delayed_outside) + + @pytest.mark.skipif( + not IS_INSIDE_VPCSC, + reason="This test requires a VPCSC and setting 
GOOGLE_CLOUD_TESTS_IN_VPCSC", + ) + @pytest.mark.skipif( + PROJECT_OUTSIDE is None, + reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", + ) + def test_update_stored_info_type(self): + client = dlp_v2.DlpServiceClient() + name_inside = client.project_path(PROJECT_INSIDE) + delayed_inside = lambda: client.update_stored_info_type(name_inside) + name_outside = client.project_path(PROJECT_OUTSIDE) + delayed_outside = lambda: client.update_stored_info_type(name_outside) + TestSystemDlpService._do_test(delayed_inside, delayed_outside) + + @pytest.mark.skipif( + not IS_INSIDE_VPCSC, + reason="This test requires a VPCSC and setting GOOGLE_CLOUD_TESTS_IN_VPCSC", + ) + @pytest.mark.skipif( + PROJECT_OUTSIDE is None, + reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", + ) + def test_get_stored_info_type(self): + client = dlp_v2.DlpServiceClient() + name_inside = client.project_path(PROJECT_INSIDE) + delayed_inside = lambda: client.get_stored_info_type(name_inside) + name_outside = client.project_path(PROJECT_OUTSIDE) + delayed_outside = lambda: client.get_stored_info_type(name_outside) + TestSystemDlpService._do_test(delayed_inside, delayed_outside) + + @pytest.mark.skip(reason="List tests are currently not supported") + @pytest.mark.skipif( + not IS_INSIDE_VPCSC, + reason="This test requires a VPCSC and setting GOOGLE_CLOUD_TESTS_IN_VPCSC", + ) + @pytest.mark.skipif( + PROJECT_OUTSIDE is None, + reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", + ) + def test_list_stored_info_types(self): + client = dlp_v2.DlpServiceClient() + name_inside = client.project_path(PROJECT_INSIDE) + delayed_inside = lambda: client.list_stored_info_types(name_inside) + name_outside = client.project_path(PROJECT_OUTSIDE) + delayed_outside = lambda: client.list_stored_info_types(name_outside) + TestSystemDlpService._do_test(delayed_inside, delayed_outside) + + 
@pytest.mark.skipif( + not IS_INSIDE_VPCSC, + reason="This test requires a VPCSC and setting GOOGLE_CLOUD_TESTS_IN_VPCSC", + ) + @pytest.mark.skipif( + PROJECT_OUTSIDE is None, + reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", + ) + def test_delete_stored_info_type(self): + client = dlp_v2.DlpServiceClient() + name_inside = client.project_path(PROJECT_INSIDE) + delayed_inside = lambda: client.delete_stored_info_type(name_inside) + name_outside = client.project_path(PROJECT_OUTSIDE) + delayed_outside = lambda: client.delete_stored_info_type(name_outside) + TestSystemDlpService._do_test(delayed_inside, delayed_outside) diff --git a/dns/CHANGELOG.md b/dns/CHANGELOG.md index 19e7b74c322a..86088ee8e792 100644 --- a/dns/CHANGELOG.md +++ b/dns/CHANGELOG.md @@ -4,6 +4,22 @@ [1]: https://pypi.org/project/google-cloud-dns/#history +## 0.31.0 + +10-15-2019 06:42 PDT + + +### Dependencies +- Pin 'google-cloud-core >= 1.0.3, < 2.0.0dev'. ([#9445](https://github.com/googleapis/google-cloud-python/pull/9445)) + +### Documentation +- Fix intersphinx reference to `requests`. ([#9294](https://github.com/googleapis/google-cloud-python/pull/9294)) +- Fix broken links in docs. ([#9148](https://github.com/googleapis/google-cloud-python/pull/9148)) +- Remove CI for gh-pages, use googleapis.dev for `api_core` refs. ([#9085](https://github.com/googleapis/google-cloud-python/pull/9085)) +- Remove compatability badges from READMEs. ([#9035](https://github.com/googleapis/google-cloud-python/pull/9035)) +- Update intersphinx mapping for requests. ([#8805](https://github.com/googleapis/google-cloud-python/pull/8805)) +- Link to googleapis.dev documentation in READMEs. 
([#8705](https://github.com/googleapis/google-cloud-python/pull/8705)) + ## 0.30.2 07-11-2019 10:09 PDT diff --git a/dns/docs/conf.py b/dns/docs/conf.py index 00a467bd4ccf..103945f1ab7a 100644 --- a/dns/docs/conf.py +++ b/dns/docs/conf.py @@ -336,7 +336,7 @@ "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://2.python-requests.org/en/master/", None), + "requests": ("https://requests.kennethreitz.org/en/stable/", None), } diff --git a/dns/google/cloud/dns/_http.py b/dns/google/cloud/dns/_http.py index 510681261bee..51f3f5634af7 100644 --- a/dns/google/cloud/dns/_http.py +++ b/dns/google/cloud/dns/_http.py @@ -29,15 +29,14 @@ class Connection(_http.JSONConnection): :param client_info: (Optional) instance used to generate user agent. """ - def __init__(self, client, client_info=None): - super(Connection, self).__init__(client, client_info) + DEFAULT_API_ENDPOINT = "https://dns.googleapis.com" + def __init__(self, client, client_info=None, api_endpoint=DEFAULT_API_ENDPOINT): + super(Connection, self).__init__(client, client_info) + self.API_BASE_URL = api_endpoint self._client_info.gapic_version = __version__ self._client_info.client_library_version = __version__ - API_BASE_URL = "https://dns.googleapis.com" - """The base of the API call URL.""" - API_VERSION = "v1" """The version of the API, used in building the API call's URL.""" diff --git a/dns/google/cloud/dns/client.py b/dns/google/cloud/dns/client.py index 4bfa112d5226..f1817a3cac2c 100644 --- a/dns/google/cloud/dns/client.py +++ b/dns/google/cloud/dns/client.py @@ -15,6 +15,7 @@ """Client for interacting with the Google Cloud DNS API.""" from google.api_core import page_iterator +from google.api_core import client_options as client_options_mod from google.cloud.client import ClientWithProject from google.cloud.dns._http import Connection @@ 
-50,16 +51,37 @@ class Client(ClientWithProject): requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own library or partner tool. + + :type client_options: :class:`~google.api_core.client_options.ClientOptions` + or :class:`dict` + :param client_options: (Optional) Client options used to set user options + on the client. API Endpoint should be set through client_options. """ SCOPE = ("https://www.googleapis.com/auth/ndev.clouddns.readwrite",) """The scopes required for authenticating as a Cloud DNS consumer.""" - def __init__(self, project=None, credentials=None, _http=None, client_info=None): + def __init__( + self, + project=None, + credentials=None, + _http=None, + client_info=None, + client_options=None, + ): super(Client, self).__init__( project=project, credentials=credentials, _http=_http ) - self._connection = Connection(self, client_info=client_info) + + kwargs = {"client_info": client_info} + if client_options: + if isinstance(client_options, dict): + client_options = client_options_mod.from_dict(client_options) + + if client_options.api_endpoint: + kwargs["api_endpoint"] = client_options.api_endpoint + + self._connection = Connection(self, **kwargs) def quotas(self): """Return DNS quotas for the project associated with this client. 
diff --git a/dns/setup.py b/dns/setup.py index 3e9da9607d04..29b45df05e83 100644 --- a/dns/setup.py +++ b/dns/setup.py @@ -22,14 +22,14 @@ name = 'google-cloud-dns' description = 'Google Cloud DNS API client library' -version = '0.30.2' +version = '0.31.0' # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' # 'Development Status :: 5 - Production/Stable' release_status = 'Development Status :: 3 - Alpha' dependencies = [ - "google-cloud-core >= 1.0.0, < 2.0dev", + "google-cloud-core >= 1.0.3, < 2.0dev", ] extras = { } diff --git a/dns/tests/unit/test__http.py b/dns/tests/unit/test__http.py index 9c5198f0a7d2..d1b656c101a7 100644 --- a/dns/tests/unit/test__http.py +++ b/dns/tests/unit/test__http.py @@ -44,6 +44,12 @@ def test_build_api_url_w_extra_query_params(self): parms = dict(parse_qsl(qs)) self.assertEqual(parms["bar"], "baz") + def test_build_api_url_w_custom_endpoint(self): + custom_endpoint = "https://foo-dns.googleapis.com" + conn = self._make_one(object(), api_endpoint=custom_endpoint) + URI = "/".join([custom_endpoint, "dns", conn.API_VERSION, "foo"]) + self.assertEqual(conn.build_api_url("/foo"), URI) + def test_extra_headers(self): import requests from google.cloud import _http as base_http diff --git a/dns/tests/unit/test_client.py b/dns/tests/unit/test_client.py index 5ca6eec98a1f..2d1e274c98d9 100644 --- a/dns/tests/unit/test_client.py +++ b/dns/tests/unit/test_client.py @@ -37,7 +37,7 @@ def _get_target_class(): def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) - def test_ctor(self): + def test_ctor_defaults(self): from google.api_core.client_info import ClientInfo from google.cloud.dns._http import Connection @@ -48,6 +48,9 @@ def test_ctor(self): self.assertIs(client._connection.credentials, creds) self.assertIs(client._connection.http, http) self.assertIsInstance(client._connection._client_info, ClientInfo) + self.assertEqual( + client._connection.API_BASE_URL, 
client._connection.DEFAULT_API_ENDPOINT + ) def test_ctor_w_client_info(self): from google.api_core.client_info import ClientInfo @@ -65,6 +68,55 @@ def test_ctor_w_client_info(self): self.assertIs(client._connection.http, http) self.assertIs(client._connection._client_info, client_info) + def test_ctor_w_empty_client_options_object(self): + from google.api_core.client_info import ClientInfo + from google.api_core.client_options import ClientOptions + from google.cloud.dns._http import Connection + + creds = _make_credentials() + http = object() + client = self._make_one( + project=self.PROJECT, + credentials=creds, + _http=http, + client_options=ClientOptions(), + ) + self.assertIsInstance(client._connection, Connection) + self.assertIs(client._connection.credentials, creds) + self.assertIs(client._connection.http, http) + self.assertIsInstance(client._connection._client_info, ClientInfo) + self.assertEqual( + client._connection.API_BASE_URL, client._connection.DEFAULT_API_ENDPOINT + ) + + def test_ctor_w_client_options_object(self): + from google.api_core.client_options import ClientOptions + + api_endpoint = "https://foo-dns.googleapis.com" + creds = _make_credentials() + http = object() + client_options = ClientOptions(api_endpoint=api_endpoint) + client = self._make_one( + project=self.PROJECT, + credentials=creds, + _http=http, + client_options=client_options, + ) + self.assertEqual(client._connection.API_BASE_URL, api_endpoint) + + def test_ctor_w_client_options_dict(self): + api_endpoint = "https://foo-dns.googleapis.com" + creds = _make_credentials() + http = object() + client_options = {"api_endpoint": api_endpoint} + client = self._make_one( + project=self.PROJECT, + credentials=creds, + _http=http, + client_options=client_options, + ) + self.assertEqual(client._connection.API_BASE_URL, api_endpoint) + def test_quotas_defaults(self): PATH = "projects/%s" % (self.PROJECT,) MANAGED_ZONES = 1234 diff --git a/docs/Makefile b/docs/Makefile deleted file mode 
100644 index d44af429f535..000000000000 --- a/docs/Makefile +++ /dev/null @@ -1,153 +0,0 @@ -# Makefile for Sphinx documentation -# - -# You can set these variables from the command line. -SPHINXOPTS = -SPHINXBUILD = sphinx-build -PAPER = -BUILDDIR = _build - -# Internal variables. -PAPEROPT_a4 = -D latex_paper_size=a4 -PAPEROPT_letter = -D latex_paper_size=letter -ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . -# the i18n builder cannot share the environment and doctrees with the others -I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . - -.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext - -help: - @echo "Please use \`make ' where is one of" - @echo " html to make standalone HTML files" - @echo " dirhtml to make HTML files named index.html in directories" - @echo " singlehtml to make a single large HTML file" - @echo " pickle to make pickle files" - @echo " json to make JSON files" - @echo " htmlhelp to make HTML files and a HTML help project" - @echo " qthelp to make HTML files and a qthelp project" - @echo " devhelp to make HTML files and a Devhelp project" - @echo " epub to make an epub" - @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" - @echo " latexpdf to make LaTeX files and run them through pdflatex" - @echo " text to make text files" - @echo " man to make manual pages" - @echo " texinfo to make Texinfo files" - @echo " info to make Texinfo files and run them through makeinfo" - @echo " gettext to make PO message catalogs" - @echo " changes to make an overview of all changed/added/deprecated items" - @echo " linkcheck to check all external links for integrity" - @echo " doctest to run all doctests embedded in the documentation (if enabled)" - -clean: - -rm -rf $(BUILDDIR)/* - -html: - $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html - @echo - @echo "Build finished. 
The HTML pages are in $(BUILDDIR)/html." - -dirhtml: - $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml - @echo - @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." - -singlehtml: - $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml - @echo - @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." - -pickle: - $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle - @echo - @echo "Build finished; now you can process the pickle files." - -json: - $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json - @echo - @echo "Build finished; now you can process the JSON files." - -htmlhelp: - $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp - @echo - @echo "Build finished; now you can run HTML Help Workshop with the" \ - ".hhp project file in $(BUILDDIR)/htmlhelp." - -qthelp: - $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp - @echo - @echo "Build finished; now you can run "qcollectiongenerator" with the" \ - ".qhcp project file in $(BUILDDIR)/qthelp, like this:" - @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/google-cloud.qhcp" - @echo "To view the help file:" - @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/google-cloud.qhc" - -devhelp: - $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp - @echo - @echo "Build finished." - @echo "To view the help file:" - @echo "# mkdir -p $$HOME/.local/share/devhelp/google-cloud" - @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/google-cloud" - @echo "# devhelp" - -epub: - $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub - @echo - @echo "Build finished. The epub file is in $(BUILDDIR)/epub." - -latex: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo - @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." - @echo "Run \`make' in that directory to run these through (pdf)latex" \ - "(use \`make latexpdf' here to do that automatically)." 
- -latexpdf: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo "Running LaTeX files through pdflatex..." - $(MAKE) -C $(BUILDDIR)/latex all-pdf - @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." - -text: - $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text - @echo - @echo "Build finished. The text files are in $(BUILDDIR)/text." - -man: - $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man - @echo - @echo "Build finished. The manual pages are in $(BUILDDIR)/man." - -texinfo: - $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo - @echo - @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." - @echo "Run \`make' in that directory to run these through makeinfo" \ - "(use \`make info' here to do that automatically)." - -info: - $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo - @echo "Running Texinfo files through makeinfo..." - make -C $(BUILDDIR)/texinfo info - @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." - -gettext: - $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale - @echo - @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." - -changes: - $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes - @echo - @echo "The overview file is in $(BUILDDIR)/changes." - -linkcheck: - $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck - @echo - @echo "Link check complete; look for any errors in the above output " \ - "or in $(BUILDDIR)/linkcheck/output.txt." - -doctest: - $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest - @echo "Testing of doctests in the sources finished, look at the " \ - "results in $(BUILDDIR)/doctest/output.txt." 
diff --git a/docs/_static/custom.css b/docs/_static/custom.css deleted file mode 100644 index 3d0319dd337c..000000000000 --- a/docs/_static/custom.css +++ /dev/null @@ -1,16 +0,0 @@ -@import url('https://fonts.googleapis.com/css?family=Roboto|Roboto+Mono'); - -@media screen and (min-width: 1080px) { - div.document { - width: 1040px; - } -} - -code.descname { - color: #4885ed; -} - -th.field-name { - min-width: 100px; - color: #3cba54; -} diff --git a/docs/_static/images/favicon.ico b/docs/_static/images/favicon.ico deleted file mode 100644 index 23c553a2966c..000000000000 Binary files a/docs/_static/images/favicon.ico and /dev/null differ diff --git a/docs/_templates/autosummary/class.rst b/docs/_templates/autosummary/class.rst deleted file mode 120000 index bd3c7e22590e..000000000000 --- a/docs/_templates/autosummary/class.rst +++ /dev/null @@ -1 +0,0 @@ -../../../third_party/sphinx/sphinx/ext/autosummary/templates/autosummary/class.rst \ No newline at end of file diff --git a/docs/_templates/autosummary/module.rst b/docs/_templates/autosummary/module.rst deleted file mode 120000 index afd9c7b5e867..000000000000 --- a/docs/_templates/autosummary/module.rst +++ /dev/null @@ -1 +0,0 @@ -../../../third_party/sphinx/sphinx/ext/autosummary/templates/autosummary/module.rst \ No newline at end of file diff --git a/docs/asset b/docs/asset deleted file mode 120000 index 86aa41c40852..000000000000 --- a/docs/asset +++ /dev/null @@ -1 +0,0 @@ -../asset/docs \ No newline at end of file diff --git a/docs/automl b/docs/automl deleted file mode 120000 index 9e2b42485592..000000000000 --- a/docs/automl +++ /dev/null @@ -1 +0,0 @@ -../automl/docs \ No newline at end of file diff --git a/docs/bigquery b/docs/bigquery deleted file mode 120000 index eb7d0e491992..000000000000 --- a/docs/bigquery +++ /dev/null @@ -1 +0,0 @@ -../bigquery/docs/ \ No newline at end of file diff --git a/docs/bigquery_datatransfer b/docs/bigquery_datatransfer deleted file mode 120000 index 
7e49b4933c3f..000000000000 --- a/docs/bigquery_datatransfer +++ /dev/null @@ -1 +0,0 @@ -../bigquery_datatransfer/docs/ \ No newline at end of file diff --git a/docs/bigquery_storage b/docs/bigquery_storage deleted file mode 120000 index 6c07150ad7db..000000000000 --- a/docs/bigquery_storage +++ /dev/null @@ -1 +0,0 @@ -../bigquery_storage/docs/ \ No newline at end of file diff --git a/docs/bigtable b/docs/bigtable deleted file mode 120000 index 27a5bfe5866c..000000000000 --- a/docs/bigtable +++ /dev/null @@ -1 +0,0 @@ -../bigtable/docs/ \ No newline at end of file diff --git a/docs/container b/docs/container deleted file mode 120000 index 348f01928ec5..000000000000 --- a/docs/container +++ /dev/null @@ -1 +0,0 @@ -../container/docs/ \ No newline at end of file diff --git a/docs/containeranalysis b/docs/containeranalysis deleted file mode 120000 index 5c1813ec7a94..000000000000 --- a/docs/containeranalysis +++ /dev/null @@ -1 +0,0 @@ -../containeranalysis/docs \ No newline at end of file diff --git a/docs/core/api_core_changelog.md b/docs/core/api_core_changelog.md deleted file mode 120000 index 0d7caa4a3e6d..000000000000 --- a/docs/core/api_core_changelog.md +++ /dev/null @@ -1 +0,0 @@ -../../api_core/CHANGELOG.md \ No newline at end of file diff --git a/docs/core/auth.rst b/docs/core/auth.rst deleted file mode 120000 index db1985a01d79..000000000000 --- a/docs/core/auth.rst +++ /dev/null @@ -1 +0,0 @@ -../../api_core/docs/auth.rst \ No newline at end of file diff --git a/docs/core/client.rst b/docs/core/client.rst deleted file mode 120000 index 0259d77ab42d..000000000000 --- a/docs/core/client.rst +++ /dev/null @@ -1 +0,0 @@ -../../core/docs/client.rst \ No newline at end of file diff --git a/docs/core/client_info.rst b/docs/core/client_info.rst deleted file mode 120000 index 011b596537b1..000000000000 --- a/docs/core/client_info.rst +++ /dev/null @@ -1 +0,0 @@ -../../api_core/docs/client_info.rst \ No newline at end of file diff --git a/docs/core/config.rst 
b/docs/core/config.rst deleted file mode 100644 index 139af98a42c5..000000000000 --- a/docs/core/config.rst +++ /dev/null @@ -1,68 +0,0 @@ -Configuration -************* - -Overview -======== - -Use service client objects to configure your applications. - -For example: - -.. code-block:: python - - >>> from google.cloud import bigquery - >>> client = bigquery.Client() - -When creating a client in this way, the project ID will be determined by -searching these locations in the following order. - -* GOOGLE_CLOUD_PROJECT environment variable -* GOOGLE_APPLICATION_CREDENTIALS JSON file -* Default service configuration path from - ``$ gcloud beta auth application-default login``. -* Google App Engine application ID -* Google Compute Engine project ID (from metadata server) - -You can override the detection of your default project by setting the - ``project`` parameter when creating client objects. - -.. code-block:: python - - >>> from google.cloud import bigquery - >>> client = bigquery.Client(project='my-project') - -You can see what project ID a client is referencing by accessing the ``project`` -property on the client object. - -.. code-block:: python - - >>> client.project - u'my-project' - -Authentication -============== - -The authentication credentials can be implicitly determined from the -environment or directly. See :doc:`/core/auth`. - -Logging in via ``gcloud beta auth application-default login`` will -automatically configure a JSON key file with your default project ID and -credentials. - -Setting the ``GOOGLE_APPLICATION_CREDENTIALS`` and ``GOOGLE_CLOUD_PROJECT`` -environment variables will override the automatically configured credentials. - -You can change your default project ID to ``my-new-default-project`` by -using the ``gcloud`` CLI tool to change the configuration. - -.. code-block:: bash - - $ gcloud config set project my-new-default-project - - -Environment Variables -===================== - -.. 
automodule:: google.cloud.environment_vars - :members: - :show-inheritance: diff --git a/docs/core/core_changelog.md b/docs/core/core_changelog.md deleted file mode 120000 index 3a3a672fc25e..000000000000 --- a/docs/core/core_changelog.md +++ /dev/null @@ -1 +0,0 @@ -../../core/CHANGELOG.md \ No newline at end of file diff --git a/docs/core/exceptions.rst b/docs/core/exceptions.rst deleted file mode 120000 index 47bcc6694524..000000000000 --- a/docs/core/exceptions.rst +++ /dev/null @@ -1 +0,0 @@ -../../api_core/docs/exceptions.rst \ No newline at end of file diff --git a/docs/core/helpers.rst b/docs/core/helpers.rst deleted file mode 120000 index 30f4943f9ff1..000000000000 --- a/docs/core/helpers.rst +++ /dev/null @@ -1 +0,0 @@ -../../api_core/docs/helpers.rst \ No newline at end of file diff --git a/docs/core/iam.rst b/docs/core/iam.rst deleted file mode 120000 index 349903e0f5a1..000000000000 --- a/docs/core/iam.rst +++ /dev/null @@ -1 +0,0 @@ -../../api_core/docs/iam.rst \ No newline at end of file diff --git a/docs/core/index.rst b/docs/core/index.rst deleted file mode 100644 index 45c68ad08ee2..000000000000 --- a/docs/core/index.rst +++ /dev/null @@ -1,31 +0,0 @@ -Core -==== - -.. toctree:: - config - auth - client - client_info - exceptions - helpers - iam - operation - operations_client - page_iterator - path_template - retry - timeout - -Changelog -~~~~~~~~~ - -The ``google-cloud-core`` package contains helpers common to all -``google-cloud-*`` packages. In an attempt to reach a stable API, -much of the functionality has been split out into a new package -``google-api-core``. - -.. 
toctree:: - :maxdepth: 2 - - ``google-api-core`` Changelog - ``google-cloud-core`` Changelog diff --git a/docs/core/operation.rst b/docs/core/operation.rst deleted file mode 120000 index 9a038fb6b79f..000000000000 --- a/docs/core/operation.rst +++ /dev/null @@ -1 +0,0 @@ -../../api_core/docs/operation.rst \ No newline at end of file diff --git a/docs/core/operations_client.rst b/docs/core/operations_client.rst deleted file mode 120000 index 17ab60406ba7..000000000000 --- a/docs/core/operations_client.rst +++ /dev/null @@ -1 +0,0 @@ -../../api_core/docs/operations_client.rst \ No newline at end of file diff --git a/docs/core/page_iterator.rst b/docs/core/page_iterator.rst deleted file mode 120000 index c815056363ab..000000000000 --- a/docs/core/page_iterator.rst +++ /dev/null @@ -1 +0,0 @@ -../../api_core/docs/page_iterator.rst \ No newline at end of file diff --git a/docs/core/path_template.rst b/docs/core/path_template.rst deleted file mode 120000 index 52a1bc7574b8..000000000000 --- a/docs/core/path_template.rst +++ /dev/null @@ -1 +0,0 @@ -../../api_core/docs/path_template.rst \ No newline at end of file diff --git a/docs/core/retry.rst b/docs/core/retry.rst deleted file mode 120000 index 0800fecb5177..000000000000 --- a/docs/core/retry.rst +++ /dev/null @@ -1 +0,0 @@ -../../api_core/docs/retry.rst \ No newline at end of file diff --git a/docs/core/timeout.rst b/docs/core/timeout.rst deleted file mode 120000 index 1ec11737305c..000000000000 --- a/docs/core/timeout.rst +++ /dev/null @@ -1 +0,0 @@ -../../api_core/docs/timeout.rst \ No newline at end of file diff --git a/docs/datacatalog b/docs/datacatalog deleted file mode 120000 index 41b79354a771..000000000000 --- a/docs/datacatalog +++ /dev/null @@ -1 +0,0 @@ -../datacatalog/docs \ No newline at end of file diff --git a/docs/datalabeling b/docs/datalabeling deleted file mode 120000 index 87c131ea5578..000000000000 --- a/docs/datalabeling +++ /dev/null @@ -1 +0,0 @@ -../datalabeling/docs \ No newline at end of 
file diff --git a/docs/dataproc b/docs/dataproc deleted file mode 120000 index 5547c1a0d489..000000000000 --- a/docs/dataproc +++ /dev/null @@ -1 +0,0 @@ -../dataproc/docs/ \ No newline at end of file diff --git a/docs/datastore b/docs/datastore deleted file mode 120000 index 8ce744facb5d..000000000000 --- a/docs/datastore +++ /dev/null @@ -1 +0,0 @@ -../datastore/docs/ \ No newline at end of file diff --git a/docs/dlp b/docs/dlp deleted file mode 120000 index b512b460a89c..000000000000 --- a/docs/dlp +++ /dev/null @@ -1 +0,0 @@ -../dlp/docs/ \ No newline at end of file diff --git a/docs/dns b/docs/dns deleted file mode 120000 index 9eea51dc71fe..000000000000 --- a/docs/dns +++ /dev/null @@ -1 +0,0 @@ -../dns/docs \ No newline at end of file diff --git a/docs/error-reporting b/docs/error-reporting deleted file mode 120000 index bb46a9501069..000000000000 --- a/docs/error-reporting +++ /dev/null @@ -1 +0,0 @@ -../error_reporting/docs/ \ No newline at end of file diff --git a/docs/firestore b/docs/firestore deleted file mode 120000 index 3c1d977bafcf..000000000000 --- a/docs/firestore +++ /dev/null @@ -1 +0,0 @@ -../firestore/docs/ \ No newline at end of file diff --git a/docs/grafeas b/docs/grafeas deleted file mode 120000 index 5e8270b35fb2..000000000000 --- a/docs/grafeas +++ /dev/null @@ -1 +0,0 @@ -../grafeas/docs \ No newline at end of file diff --git a/docs/iam b/docs/iam deleted file mode 120000 index 783f393b393c..000000000000 --- a/docs/iam +++ /dev/null @@ -1 +0,0 @@ -../iam/docs \ No newline at end of file diff --git a/docs/index.rst b/docs/index.rst deleted file mode 100644 index 09d44e06ccda..000000000000 --- a/docs/index.rst +++ /dev/null @@ -1,123 +0,0 @@ -.. 
toctree:: - :maxdepth: 1 - :hidden: - - Core Libraries - Asset Management - AutoML - BigQuery - BigQuery Data-Transfer - BigQuery Storage - Bigtable - Container - Container Analysis - Data Catalog - Data Labeling - Data Loss Prevention - Dataproc - Datastore - DNS - Firestore - Grafeas - IAM - IoT - Key Management - Natural Language - OSLogin - PubSub - Memorystore - Resource Manager - Runtime Configuration - Scheduler - Security Center - Security Scanner - Spanner - Speech - Stackdriver Error Reporting - Stackdriver Incident Response & Management - Stackdriver Logging - Stackdriver Monitoring - Stackdriver Trace - Storage - Talent - Tasks - Text-to-Speech - Translate - Video Intelligence - Vision - Web Risk - Release History - -Google Cloud Client Library for Python -====================================== - -Getting started ---------------- - -For more information on setting up your Python development environment, -such as installing ``pip`` and ``virtualenv`` on your system, please refer -to `Python Development Environment Setup Guide`_ for Google Cloud Platform. - -.. _Python Development Environment Setup Guide: https://cloud.google.com/python/setup - -Cloud Datastore -~~~~~~~~~~~~~~~ - -`Google Cloud Datastore`_ is a fully managed, schemaless database for storing -non-relational data. - -.. _Google Cloud Datastore: https://cloud.google.com/datastore/ - -Install the ``google-cloud-datastore`` library using ``pip``: - -.. code-block:: console - - $ pip install google-cloud-datastore - -Example -^^^^^^^ - -.. code-block:: python - - from google.cloud import datastore - - client = datastore.Client() - key = client.key('Person') - - entity = datastore.Entity(key=key) - entity['name'] = 'Your name' - entity['age'] = 25 - client.put(entity) - -Cloud Storage -~~~~~~~~~~~~~ - -`Google Cloud Storage`_ allows you to store data on Google infrastructure. - -.. 
_Google Cloud Storage: https://cloud.google.com/storage/ - -Install the ``google-cloud-storage`` library using ``pip``: - -.. code-block:: console - - $ pip install google-cloud-storage - -Example -^^^^^^^ - -.. code-block:: python - - from google.cloud import storage - - client = storage.Client() - bucket = client.get_bucket('') - blob = bucket.blob('my-test-file.txt') - blob.upload_from_string('this is test content!') - -Resources -~~~~~~~~~ - -* `GitHub `__ -* `Issues `__ -* `Stack Overflow `__ -* `PyPI `__ diff --git a/docs/iot b/docs/iot deleted file mode 120000 index e7b6a0ab0473..000000000000 --- a/docs/iot +++ /dev/null @@ -1 +0,0 @@ -../iot/docs \ No newline at end of file diff --git a/docs/irm b/docs/irm deleted file mode 120000 index 02724766d326..000000000000 --- a/docs/irm +++ /dev/null @@ -1 +0,0 @@ -../irm/docs \ No newline at end of file diff --git a/docs/kms b/docs/kms deleted file mode 120000 index 503e417ec91c..000000000000 --- a/docs/kms +++ /dev/null @@ -1 +0,0 @@ -../kms/docs/ \ No newline at end of file diff --git a/docs/language b/docs/language deleted file mode 120000 index ab6f1cc1e084..000000000000 --- a/docs/language +++ /dev/null @@ -1 +0,0 @@ -../language/docs/ \ No newline at end of file diff --git a/docs/logging b/docs/logging deleted file mode 120000 index 072fd0d1a72f..000000000000 --- a/docs/logging +++ /dev/null @@ -1 +0,0 @@ -../logging/docs/ \ No newline at end of file diff --git a/docs/monitoring b/docs/monitoring deleted file mode 120000 index d7a58517323e..000000000000 --- a/docs/monitoring +++ /dev/null @@ -1 +0,0 @@ -../monitoring/docs \ No newline at end of file diff --git a/docs/oslogin b/docs/oslogin deleted file mode 120000 index d26f32b7b0da..000000000000 --- a/docs/oslogin +++ /dev/null @@ -1 +0,0 @@ -../oslogin/docs/ \ No newline at end of file diff --git a/docs/pubsub b/docs/pubsub deleted file mode 120000 index 75a8b87c5ae0..000000000000 --- a/docs/pubsub +++ /dev/null @@ -1 +0,0 @@ -../pubsub/docs/ \ No newline 
at end of file diff --git a/docs/redis b/docs/redis deleted file mode 120000 index 351c953543ba..000000000000 --- a/docs/redis +++ /dev/null @@ -1 +0,0 @@ -../redis/docs \ No newline at end of file diff --git a/docs/releases.rst b/docs/releases.rst deleted file mode 100644 index cdabdf539783..000000000000 --- a/docs/releases.rst +++ /dev/null @@ -1,60 +0,0 @@ -######################### -``google-cloud`` Releases -######################### - -.. attention:: The ``google-cloud`` package is deprecated - - The ``google-cloud`` package is no longer maintained or updated. Instead, - install the ``google-cloud-*`` subpackages directly. - - -The ``google-cloud`` package (formerly ``gcloud``) contains -**all** ``google-cloud-*`` subpackages. - -* ``gcloud==0.01`` (`PyPI `__) -* ``gcloud==0.02`` (`PyPI `__) -* ``gcloud==0.02.1`` (`PyPI `__) -* ``gcloud==0.02.2`` (`PyPI `__) -* ``gcloud==0.3.0`` (`PyPI `__, `Release Notes `__) -* ``gcloud==0.4.0`` (`PyPI `__, `Release Notes `__) -* ``gcloud==0.4.1`` (`PyPI `__, `Release Notes `__) -* ``gcloud==0.4.2`` (`PyPI `__, `Release Notes `__) -* ``gcloud==0.4.3`` (`PyPI `__, `Release Notes `__) -* ``gcloud==0.5.0`` (`PyPI `__, `Release Notes `__) -* ``gcloud==0.6.0`` (`PyPI `__, `Release Notes `__) -* ``gcloud==0.7.0`` (`PyPI `__, `Release Notes `__) -* ``gcloud==0.7.1`` (`PyPI `__, `Release Notes `__) -* ``gcloud==0.8.0`` (`PyPI `__, `Release Notes `__) -* ``gcloud==0.9.0`` (`PyPI `__, `Release Notes `__) -* ``gcloud==0.10.0`` (`PyPI `__, `Release Notes `__) -* ``gcloud==0.10.1`` (`PyPI `__, `Release Notes `__) -* ``gcloud==0.11.0`` (`PyPI `__, `Release Notes `__) -* ``gcloud==0.12.0`` (`PyPI `__, `Release Notes `__) -* ``gcloud==0.12.1`` (`PyPI `__, `Release Notes `__) -* ``gcloud==0.13.0`` (`PyPI `__, `Release Notes `__) -* ``gcloud==0.14.0`` (`PyPI `__, `Release Notes `__) -* ``gcloud==0.15.0`` (`PyPI `__, `Release Notes `__) -* ``gcloud==0.16.0`` (`PyPI `__, `Release Notes `__) -* ``gcloud==0.16.1`` (`PyPI `__, `Release Notes `__) 
-* ``gcloud==0.17.0`` (`PyPI `__, `Release Notes `__) -* ``gcloud==0.18.0`` (`PyPI `__, `Release Notes `__) -* ``gcloud==0.18.1`` (`PyPI `__, `Release Notes `__) -* ``gcloud==0.18.2`` (`PyPI `__, `Release Notes `__) -* ``gcloud==0.18.3`` (`PyPI `__, `Release Notes `__) -* ``0.19.0`` (`PyPI `__, `Release Notes `__) -* ``0.20.0`` (`PyPI `__, `Release Notes `__) -* ``0.21.0`` (`PyPI `__, `Release Notes `__) -* ``0.21.1`` (`PyPI `__, `Release Notes `__) -* ``0.22.0`` (`PyPI `__, `Release Notes `__) -* ``0.23.0`` (`PyPI `__, `Release Notes `__) -* ``0.24.0`` (`PyPI `__, `Release Notes `__) -* ``0.25.0`` (`PyPI `__, `Release Notes `__) -* ``0.25.1`` (`PyPI `__, `Release Notes `__) -* ``0.26.0`` (`PyPI `__, `Release Notes `__) -* ``0.26.1`` (`PyPI `__, `Release Notes `__) -* ``0.27.0`` (`PyPI `__, `Release Notes `__) -* ``0.28.0`` (`PyPI `__, `Release Notes `__) -* ``0.29.0`` (`PyPI `__, `Release Notes `__) -* ``0.30.0`` (`PyPI `__, `Release Notes `__) -* ``0.31.0`` (`PyPI `__, `Release Notes `__) -* ``0.32.0`` (`PyPI `__, `Release Notes `__) diff --git a/docs/requirements.txt b/docs/requirements.txt deleted file mode 100644 index c5e6e1519dce..000000000000 --- a/docs/requirements.txt +++ /dev/null @@ -1,51 +0,0 @@ -setuptools >= 36.4.0 -sphinx>=1.6.3, <2.2 -ipython >= 4 -recommonmark >= 0.4.0 -grpcio-gcp >= 0.2.2 - --e api_core/ --e core/ --e storage/ --e asset/ --e automl/ --e bigquery/ --e bigquery_datatransfer/ --e bigquery_storage/ --e bigtable/ --e container/ --e containeranalysis/ --e datacatalog/ --e datalabeling/ --e dataproc/ --e datastore/ --e dlp/ --e dns/ --e firestore/ --e grafeas/ --e iam/ --e iot/ --e irm/ --e kms/ --e language/ --e logging/ --e error_reporting/ --e monitoring/ --e pubsub/ --e oslogin/ --e redis/ --e resource_manager/ --e runtimeconfig/ --e scheduler/ --e securitycenter/ --e spanner/ --e speech/ --e talent/ --e tasks/ --e texttospeech/ --e trace/ --e translate/ --e videointelligence/ --e vision/ --e webrisk/ --e websecurityscanner/ diff 
--git a/docs/resource-manager b/docs/resource-manager deleted file mode 120000 index 2bd0e30aaf64..000000000000 --- a/docs/resource-manager +++ /dev/null @@ -1 +0,0 @@ -../resource_manager/docs/ \ No newline at end of file diff --git a/docs/runtimeconfig b/docs/runtimeconfig deleted file mode 120000 index 4a0a8ad2ddea..000000000000 --- a/docs/runtimeconfig +++ /dev/null @@ -1 +0,0 @@ -../runtimeconfig/docs \ No newline at end of file diff --git a/docs/scheduler b/docs/scheduler deleted file mode 120000 index 767cd9a38af9..000000000000 --- a/docs/scheduler +++ /dev/null @@ -1 +0,0 @@ -../scheduler/docs \ No newline at end of file diff --git a/docs/securitycenter b/docs/securitycenter deleted file mode 120000 index c17cf5ceaec8..000000000000 --- a/docs/securitycenter +++ /dev/null @@ -1 +0,0 @@ -../securitycenter/docs \ No newline at end of file diff --git a/docs/spanner b/docs/spanner deleted file mode 120000 index e934485326b2..000000000000 --- a/docs/spanner +++ /dev/null @@ -1 +0,0 @@ -../spanner/docs/ \ No newline at end of file diff --git a/docs/speech b/docs/speech deleted file mode 120000 index 828be4c44930..000000000000 --- a/docs/speech +++ /dev/null @@ -1 +0,0 @@ -../speech/docs/ \ No newline at end of file diff --git a/docs/storage b/docs/storage deleted file mode 120000 index de74c3c4f7cf..000000000000 --- a/docs/storage +++ /dev/null @@ -1 +0,0 @@ -../storage/docs/ \ No newline at end of file diff --git a/docs/talent b/docs/talent deleted file mode 120000 index 90b2d20990c2..000000000000 --- a/docs/talent +++ /dev/null @@ -1 +0,0 @@ -../talent/docs \ No newline at end of file diff --git a/docs/tasks b/docs/tasks deleted file mode 120000 index 5974558b192a..000000000000 --- a/docs/tasks +++ /dev/null @@ -1 +0,0 @@ -../tasks/docs \ No newline at end of file diff --git a/docs/texttospeech b/docs/texttospeech deleted file mode 120000 index fcb796b9487d..000000000000 --- a/docs/texttospeech +++ /dev/null @@ -1 +0,0 @@ -../texttospeech/docs/ \ No newline at 
end of file diff --git a/docs/trace b/docs/trace deleted file mode 120000 index a2361d0f9a55..000000000000 --- a/docs/trace +++ /dev/null @@ -1 +0,0 @@ -../trace/docs \ No newline at end of file diff --git a/docs/translate b/docs/translate deleted file mode 120000 index 8ce8fe7bf496..000000000000 --- a/docs/translate +++ /dev/null @@ -1 +0,0 @@ -../translate/docs \ No newline at end of file diff --git a/docs/videointelligence b/docs/videointelligence deleted file mode 120000 index 7f22ebf79f2b..000000000000 --- a/docs/videointelligence +++ /dev/null @@ -1 +0,0 @@ -../videointelligence/docs/ \ No newline at end of file diff --git a/docs/vision b/docs/vision deleted file mode 120000 index bd63572ac784..000000000000 --- a/docs/vision +++ /dev/null @@ -1 +0,0 @@ -../vision/docs \ No newline at end of file diff --git a/docs/webrisk b/docs/webrisk deleted file mode 120000 index 6b09785486bf..000000000000 --- a/docs/webrisk +++ /dev/null @@ -1 +0,0 @@ -../webrisk/docs \ No newline at end of file diff --git a/docs/websecurityscanner b/docs/websecurityscanner deleted file mode 120000 index a0bc42093b44..000000000000 --- a/docs/websecurityscanner +++ /dev/null @@ -1 +0,0 @@ -../websecurityscanner/docs/ \ No newline at end of file diff --git a/documentai/.coveragerc b/documentai/.coveragerc new file mode 100644 index 000000000000..b178b094aa1d --- /dev/null +++ b/documentai/.coveragerc @@ -0,0 +1,19 @@ +# Generated by synthtool. DO NOT EDIT! +[run] +branch = True + +[report] +fail_under = 100 +show_missing = True +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ + # Ignore abstract methods + raise NotImplementedError +omit = + */gapic/*.py + */proto/*.py + */core/*.py + */site-packages/*.py \ No newline at end of file diff --git a/documentai/.flake8 b/documentai/.flake8 new file mode 100644 index 000000000000..0268ecc9c55c --- /dev/null +++ b/documentai/.flake8 @@ -0,0 +1,14 @@ +# Generated by synthtool. 
DO NOT EDIT! +[flake8] +ignore = E203, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + *_pb2.py + + # Standard linting exemptions. + __pycache__, + .git, + *.pyc, + conf.py diff --git a/documentai/.repo-metadata.json b/documentai/.repo-metadata.json new file mode 100644 index 000000000000..3f8832984651 --- /dev/null +++ b/documentai/.repo-metadata.json @@ -0,0 +1,13 @@ +{ + "name": "documentai", + "name_pretty": "Cloud Document Understanding API", + "product_documentation": "https://cloud.google.com/document-understanding/docs/", + "client_documentation": "https://googleapis.dev/python/documentai/latest", + "issue_tracker": "", + "release_level": "alpha", + "language": "python", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-cloud-documentai", + "api_id": "documentai.googleapis.com", + "requires_billing": true +} \ No newline at end of file diff --git a/documentai/CHANGELOG.md b/documentai/CHANGELOG.md new file mode 100644 index 000000000000..825c32f0d03d --- /dev/null +++ b/documentai/CHANGELOG.md @@ -0,0 +1 @@ +# Changelog diff --git a/documentai/LICENSE b/documentai/LICENSE new file mode 100644 index 000000000000..a8ee855de2aa --- /dev/null +++ b/documentai/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + https://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/documentai/MANIFEST.in b/documentai/MANIFEST.in new file mode 100644 index 000000000000..9cbf175afe6b --- /dev/null +++ b/documentai/MANIFEST.in @@ -0,0 +1,5 @@ +include README.rst LICENSE +recursive-include google *.json *.proto +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ diff --git a/documentai/README.rst b/documentai/README.rst new file mode 100644 index 000000000000..13151dbcca07 --- /dev/null +++ b/documentai/README.rst @@ -0,0 +1,77 @@ +Python Client for Cloud Document AI API (`Alpha`_) +================================================== + +`Cloud Document AI API`_: Service to parse structured information from unstructured or +semi-structured documents using state-of-the-art Google AI such as natural +language, computer vision, translation, and AutoML. + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. _Alpha: https://github.com/googleapis/google-cloud-python/blob/master/README.rst +.. _Cloud Document AI API: https://cloud.google.com/document-understanding/docs/ +.. _Client Library Documentation: https://googleapis.dev/python/documentai/latest +.. _Product Documentation: https://cloud.google.com/document-understanding/docs/ + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. 
`Enable billing for your project.`_ +3. `Enable the Cloud Document AI API.`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the Cloud Document AI API.: https://cloud.google.com/document-understanding/docs/ +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + pip install virtualenv + virtualenv + source /bin/activate + /bin/pip install google-cloud-documentai + + +Windows +^^^^^^^ + +.. code-block:: console + + pip install virtualenv + virtualenv + \Scripts\activate + \Scripts\pip.exe install google-cloud-documentai + +Next Steps +~~~~~~~~~~ + +- Read the `Client Library Documentation`_ for Cloud Document AI API + API to see other available methods on the client. +- Read the `Cloud Document AI API Product documentation`_ to learn + more about the product and see How-to Guides. +- View this `repository’s main README`_ to see the full list of Cloud + APIs that we cover. + +.. _Cloud Document AI API Product documentation: https://cloud.google.com/document-understanding/docs/ +.. 
_repository’s main README: https://github.com/googleapis/google-cloud-python/blob/master/README.rst \ No newline at end of file diff --git a/documentai/docs/README.rst b/documentai/docs/README.rst new file mode 120000 index 000000000000..89a0106941ff --- /dev/null +++ b/documentai/docs/README.rst @@ -0,0 +1 @@ +../README.rst \ No newline at end of file diff --git a/documentai/docs/changelog.md b/documentai/docs/changelog.md new file mode 120000 index 000000000000..04c99a55caae --- /dev/null +++ b/documentai/docs/changelog.md @@ -0,0 +1 @@ +../CHANGELOG.md \ No newline at end of file diff --git a/documentai/docs/conf.py b/documentai/docs/conf.py new file mode 100644 index 000000000000..682903abef0d --- /dev/null +++ b/documentai/docs/conf.py @@ -0,0 +1,363 @@ +# -*- coding: utf-8 -*- +# +# google-cloud-documentai documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +__version__ = "0.1.0" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.6.3" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. 
+extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_flags = ["members"] +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# Allow markdown includes (so releases.md can include CHANGLEOG.md) +# http://www.sphinx-doc.org/en/master/markdown.html +source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The master toctree document. +master_doc = "index" + +# General information about the project. +project = u"google-cloud-documentai" +copyright = u"2017, Google" +author = u"Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. 
+# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ["_build"] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for Python", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. 
If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +# html_static_path = [] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. 
Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-documentai-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. 
List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + master_doc, + "google-cloud-documentai.tex", + u"google-cloud-documentai Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + master_doc, + "google-cloud-documentai", + u"google-cloud-documentai Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + master_doc, + "google-cloud-documentai", + u"google-cloud-documentai Documentation", + author, + "google-cloud-documentai", + "GAPIC library for the {metadata.shortName} v1beta1 service", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. 
+# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("https://requests.kennethreitz.org/en/master/", None), + "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), + "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/documentai/docs/gapic/v1beta1/api.rst b/documentai/docs/gapic/v1beta1/api.rst new file mode 100644 index 000000000000..e588392f040c --- /dev/null +++ b/documentai/docs/gapic/v1beta1/api.rst @@ -0,0 +1,6 @@ +Client for Cloud Document AI API +================================ + +.. automodule:: google.cloud.documentai_v1beta1 + :members: + :inherited-members: \ No newline at end of file diff --git a/documentai/docs/gapic/v1beta1/types.rst b/documentai/docs/gapic/v1beta1/types.rst new file mode 100644 index 000000000000..469eb1074bfe --- /dev/null +++ b/documentai/docs/gapic/v1beta1/types.rst @@ -0,0 +1,5 @@ +Types for Cloud Document AI API Client +====================================== + +.. 
automodule:: google.cloud.documentai_v1beta1.types + :members: \ No newline at end of file diff --git a/documentai/docs/index.rst b/documentai/docs/index.rst new file mode 100644 index 000000000000..f15473e5a1e7 --- /dev/null +++ b/documentai/docs/index.rst @@ -0,0 +1,19 @@ +.. include:: README.rst + +Api Reference +------------- +.. toctree:: + :maxdepth: 2 + + gapic/v1beta1/api + gapic/v1beta1/types + +Changelog +--------- + +For a list of all ``google-cloud-documentai`` releases: + +.. toctree:: + :maxdepth: 2 + + changelog \ No newline at end of file diff --git a/documentai/google/__init__.py b/documentai/google/__init__.py new file mode 100644 index 000000000000..8fcc60e2b9c6 --- /dev/null +++ b/documentai/google/__init__.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +try: + import pkg_resources + + pkg_resources.declare_namespace(__name__) +except ImportError: + import pkgutil + + __path__ = pkgutil.extend_path(__path__, __name__) diff --git a/documentai/google/cloud/__init__.py b/documentai/google/cloud/__init__.py new file mode 100644 index 000000000000..8fcc60e2b9c6 --- /dev/null +++ b/documentai/google/cloud/__init__.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +try: + import pkg_resources + + pkg_resources.declare_namespace(__name__) +except ImportError: + import pkgutil + + __path__ = pkgutil.extend_path(__path__, __name__) diff --git a/documentai/google/cloud/documentai.py b/documentai/google/cloud/documentai.py new file mode 100644 index 000000000000..436c2d771509 --- /dev/null +++ b/documentai/google/cloud/documentai.py @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +from __future__ import absolute_import + +from google.cloud.documentai_v1beta1 import DocumentUnderstandingServiceClient +from google.cloud.documentai_v1beta1 import enums +from google.cloud.documentai_v1beta1 import types + + +__all__ = ("enums", "types", "DocumentUnderstandingServiceClient") diff --git a/documentai/google/cloud/documentai_v1beta1/__init__.py b/documentai/google/cloud/documentai_v1beta1/__init__.py new file mode 100644 index 000000000000..beaf5faa1ed6 --- /dev/null +++ b/documentai/google/cloud/documentai_v1beta1/__init__.py @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +from __future__ import absolute_import + +from google.cloud.documentai_v1beta1 import types +from google.cloud.documentai_v1beta1.gapic import document_understanding_service_client +from google.cloud.documentai_v1beta1.gapic import enums + + +class DocumentUnderstandingServiceClient( + document_understanding_service_client.DocumentUnderstandingServiceClient +): + __doc__ = ( + document_understanding_service_client.DocumentUnderstandingServiceClient.__doc__ + ) + enums = enums + + +__all__ = ("enums", "types", "DocumentUnderstandingServiceClient") diff --git a/documentai/google/cloud/documentai_v1beta1/gapic/__init__.py b/documentai/google/cloud/documentai_v1beta1/gapic/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/videointelligence/google/cloud/videointelligence_v1beta1/gapic/video_intelligence_service_client.py b/documentai/google/cloud/documentai_v1beta1/gapic/document_understanding_service_client.py similarity index 61% rename from videointelligence/google/cloud/videointelligence_v1beta1/gapic/video_intelligence_service_client.py rename to documentai/google/cloud/documentai_v1beta1/gapic/document_understanding_service_client.py index 6a22632a434a..2e1b20fab8ad 100644 --- a/videointelligence/google/cloud/videointelligence_v1beta1/gapic/video_intelligence_service_client.py +++ b/documentai/google/cloud/documentai_v1beta1/gapic/document_understanding_service_client.py @@ -14,7 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-"""Accesses the google.cloud.videointelligence.v1beta1 VideoIntelligenceService API.""" +"""Accesses the google.cloud.documentai.v1beta1 DocumentUnderstandingService API.""" import pkg_resources import warnings @@ -24,37 +24,42 @@ import google.api_core.gapic_v1.client_info import google.api_core.gapic_v1.config import google.api_core.gapic_v1.method +import google.api_core.gapic_v1.routing_header import google.api_core.grpc_helpers import google.api_core.operation import google.api_core.operations_v1 import grpc -from google.cloud.videointelligence_v1beta1.gapic import enums -from google.cloud.videointelligence_v1beta1.gapic import ( - video_intelligence_service_client_config, +from google.cloud.documentai_v1beta1.gapic import ( + document_understanding_service_client_config, ) -from google.cloud.videointelligence_v1beta1.gapic.transports import ( - video_intelligence_service_grpc_transport, +from google.cloud.documentai_v1beta1.gapic import enums +from google.cloud.documentai_v1beta1.gapic.transports import ( + document_understanding_service_grpc_transport, ) -from google.cloud.videointelligence_v1beta1.proto import video_intelligence_pb2 -from google.cloud.videointelligence_v1beta1.proto import video_intelligence_pb2_grpc +from google.cloud.documentai_v1beta1.proto import document_understanding_pb2 +from google.cloud.documentai_v1beta1.proto import document_understanding_pb2_grpc from google.longrunning import operations_pb2 _GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( - "google-cloud-videointelligence" + "google-cloud-documentai" ).version -class VideoIntelligenceServiceClient(object): - """Service that implements Google Cloud Video Intelligence API.""" +class DocumentUnderstandingServiceClient(object): + """ + Service to parse structured information from unstructured or semi-structured + documents using state-of-the-art Google AI such as natural language, + computer vision, and translation. 
+ """ - SERVICE_ADDRESS = "videointelligence.googleapis.com:443" + SERVICE_ADDRESS = "documentai.googleapis.com:443" """The default address of the service.""" # The name of the interface for this client. This is the key used to # find the method configuration in the client_config dictionary. - _INTERFACE_NAME = "google.cloud.videointelligence.v1beta1.VideoIntelligenceService" + _INTERFACE_NAME = "google.cloud.documentai.v1beta1.DocumentUnderstandingService" @classmethod def from_service_account_file(cls, filename, *args, **kwargs): @@ -68,7 +73,7 @@ def from_service_account_file(cls, filename, *args, **kwargs): kwargs: Additional arguments to pass to the constructor. Returns: - VideoIntelligenceServiceClient: The constructed client. + DocumentUnderstandingServiceClient: The constructed client. """ credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials @@ -88,8 +93,8 @@ def __init__( """Constructor. Args: - transport (Union[~.VideoIntelligenceServiceGrpcTransport, - Callable[[~.Credentials, type], ~.VideoIntelligenceServiceGrpcTransport]): A transport + transport (Union[~.DocumentUnderstandingServiceGrpcTransport, + Callable[[~.Credentials, type], ~.DocumentUnderstandingServiceGrpcTransport]): A transport instance, responsible for actually making the API calls. The default transport uses the gRPC protocol. 
This argument may also be a callable which returns a @@ -126,7 +131,7 @@ def __init__( stacklevel=2, ) else: - client_config = video_intelligence_service_client_config.config + client_config = document_understanding_service_client_config.config if channel: warnings.warn( @@ -151,7 +156,7 @@ def __init__( if callable(transport): self.transport = transport( credentials=credentials, - default_class=video_intelligence_service_grpc_transport.VideoIntelligenceServiceGrpcTransport, + default_class=document_understanding_service_grpc_transport.DocumentUnderstandingServiceGrpcTransport, address=api_endpoint, ) else: @@ -162,7 +167,7 @@ def __init__( ) self.transport = transport else: - self.transport = video_intelligence_service_grpc_transport.VideoIntelligenceServiceGrpcTransport( + self.transport = document_understanding_service_grpc_transport.DocumentUnderstandingServiceGrpcTransport( address=api_endpoint, channel=channel, credentials=credentials ) @@ -189,35 +194,26 @@ def __init__( self._inner_api_calls = {} # Service calls - def annotate_video( + def batch_process_documents( self, - input_uri, - features, - input_content=None, - video_context=None, - output_uri=None, - location_id=None, + requests, + parent=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ - Performs asynchronous video annotation. Progress and results can be - retrieved through the ``google.longrunning.Operations`` interface. - ``Operation.metadata`` contains ``AnnotateVideoProgress`` (progress). - ``Operation.response`` contains ``AnnotateVideoResponse`` (results). + LRO endpoint to batch process many documents. 
Example: - >>> from google.cloud import videointelligence_v1beta1 - >>> from google.cloud.videointelligence_v1beta1 import enums + >>> from google.cloud import documentai_v1beta1 >>> - >>> client = videointelligence_v1beta1.VideoIntelligenceServiceClient() + >>> client = documentai_v1beta1.DocumentUnderstandingServiceClient() >>> - >>> input_uri = 'gs://cloud-samples-data/video/cat.mp4' - >>> features_element = enums.Feature.LABEL_DETECTION - >>> features = [features_element] + >>> # TODO: Initialize `requests`: + >>> requests = [] >>> - >>> response = client.annotate_video(input_uri, features) + >>> response = client.batch_process_documents(requests) >>> >>> def callback(operation_future): ... # Handle result. @@ -229,35 +225,15 @@ def annotate_video( >>> metadata = response.metadata() Args: - input_uri (str): Input video location. Currently, only `Google Cloud - Storage `__ URIs are supported, which - must be specified in the following format: ``gs://bucket-id/object-id`` - (other URI formats return ``google.rpc.Code.INVALID_ARGUMENT``). For - more information, see `Request - URIs `__. A video - URI may include wildcards in ``object-id``, and thus identify multiple - videos. Supported wildcards: '\*' to match 0 or more characters; '?' to - match 1 character. If unset, the input video should be embedded in the - request as ``input_content``. If set, ``input_content`` should be unset. - features (list[~google.cloud.videointelligence_v1beta1.types.Feature]): Requested video annotation features. - input_content (str): The video data bytes. Encoding: base64. If unset, the input video(s) - should be specified via ``input_uri``. If set, ``input_uri`` should be - unset. - video_context (Union[dict, ~google.cloud.videointelligence_v1beta1.types.VideoContext]): Additional video context and/or feature-specific parameters. + requests (list[Union[dict, ~google.cloud.documentai_v1beta1.types.ProcessDocumentRequest]]): Required. Individual requests for each document. 
If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.videointelligence_v1beta1.types.VideoContext` - output_uri (str): Optional location where the output (in JSON format) should be stored. - Currently, only `Google Cloud - Storage `__ URIs are supported, which - must be specified in the following format: ``gs://bucket-id/object-id`` - (other URI formats return ``google.rpc.Code.INVALID_ARGUMENT``). For - more information, see `Request - URIs `__. - location_id (str): Optional cloud region where annotation should take place. Supported - cloud regions: ``us-east1``, ``us-west1``, ``europe-west1``, - ``asia-east1``. If no region is specified, a region will be determined - based on video file location. + message :class:`~google.cloud.documentai_v1beta1.types.ProcessDocumentRequest` + parent (str): Target project and location to make a call. + + Format: ``projects/{project-id}/locations/{location-id}``. + + If no location is specified, a region will be chosen automatically. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. @@ -268,7 +244,7 @@ def annotate_video( that is provided to the method. Returns: - A :class:`~google.cloud.videointelligence_v1beta1.types._OperationFuture` instance. + A :class:`~google.cloud.documentai_v1beta1.types._OperationFuture` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request @@ -278,30 +254,38 @@ def annotate_video( ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. 
- if "annotate_video" not in self._inner_api_calls: + if "batch_process_documents" not in self._inner_api_calls: self._inner_api_calls[ - "annotate_video" + "batch_process_documents" ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.annotate_video, - default_retry=self._method_configs["AnnotateVideo"].retry, - default_timeout=self._method_configs["AnnotateVideo"].timeout, + self.transport.batch_process_documents, + default_retry=self._method_configs["BatchProcessDocuments"].retry, + default_timeout=self._method_configs["BatchProcessDocuments"].timeout, client_info=self._client_info, ) - request = video_intelligence_pb2.AnnotateVideoRequest( - input_uri=input_uri, - features=features, - input_content=input_content, - video_context=video_context, - output_uri=output_uri, - location_id=location_id, + request = document_understanding_pb2.BatchProcessDocumentsRequest( + requests=requests, parent=parent ) - operation = self._inner_api_calls["annotate_video"]( + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("parent", parent)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + operation = self._inner_api_calls["batch_process_documents"]( request, retry=retry, timeout=timeout, metadata=metadata ) return google.api_core.operation.from_gapic( operation, self.transport._operations_client, - video_intelligence_pb2.AnnotateVideoResponse, - metadata_type=video_intelligence_pb2.AnnotateVideoProgress, + document_understanding_pb2.BatchProcessDocumentsResponse, + metadata_type=document_understanding_pb2.OperationMetadata, ) diff --git a/videointelligence/google/cloud/videointelligence_v1beta1/gapic/video_intelligence_service_client_config.py b/documentai/google/cloud/documentai_v1beta1/gapic/document_understanding_service_client_config.py similarity index 60% rename from 
videointelligence/google/cloud/videointelligence_v1beta1/gapic/video_intelligence_service_client_config.py rename to documentai/google/cloud/documentai_v1beta1/gapic/document_understanding_service_client_config.py index fdf442f5c941..256950744b7d 100644 --- a/videointelligence/google/cloud/videointelligence_v1beta1/gapic/video_intelligence_service_client_config.py +++ b/documentai/google/cloud/documentai_v1beta1/gapic/document_understanding_service_client_config.py @@ -1,23 +1,23 @@ config = { "interfaces": { - "google.cloud.videointelligence.v1beta1.VideoIntelligenceService": { + "google.cloud.documentai.v1beta1.DocumentUnderstandingService": { "retry_codes": { "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], "non_idempotent": [], }, "retry_params": { "default": { - "initial_retry_delay_millis": 1000, - "retry_delay_multiplier": 2.5, - "max_retry_delay_millis": 120000, - "initial_rpc_timeout_millis": 120000, + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 20000, "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 120000, + "max_rpc_timeout_millis": 20000, "total_timeout_millis": 600000, } }, "methods": { - "AnnotateVideo": { + "BatchProcessDocuments": { "timeout_millis": 60000, "retry_codes_name": "idempotent", "retry_params_name": "default", diff --git a/documentai/google/cloud/documentai_v1beta1/gapic/enums.py b/documentai/google/cloud/documentai_v1beta1/gapic/enums.py new file mode 100644 index 000000000000..4c907fee0b16 --- /dev/null +++ b/documentai/google/cloud/documentai_v1beta1/gapic/enums.py @@ -0,0 +1,84 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Wrappers for protocol buffer enum types.""" + +import enum + + +class Document(object): + class Page(object): + class Layout(object): + class Orientation(enum.IntEnum): + """ + Detected human reading orientation. + + Attributes: + ORIENTATION_UNSPECIFIED (int): Unspecified orientation. + PAGE_UP (int): Orientation is aligned with page up. + PAGE_RIGHT (int): Orientation is aligned with page right. + Turn the head 90 degrees clockwise from upright to read. + PAGE_DOWN (int): Orientation is aligned with page down. + Turn the head 180 degrees from upright to read. + PAGE_LEFT (int): Orientation is aligned with page left. + Turn the head 90 degrees counterclockwise from upright to read. + """ + + ORIENTATION_UNSPECIFIED = 0 + PAGE_UP = 1 + PAGE_RIGHT = 2 + PAGE_DOWN = 3 + PAGE_LEFT = 4 + + class Token(object): + class DetectedBreak(object): + class Type(enum.IntEnum): + """ + Enum to denote the type of break found. + + Attributes: + TYPE_UNSPECIFIED (int): Unspecified break type. + SPACE (int): A single whitespace. + WIDE_SPACE (int): A wider whitespace. + HYPHEN (int): A hyphen that indicates that a token has been split across lines. + """ + + TYPE_UNSPECIFIED = 0 + SPACE = 1 + WIDE_SPACE = 2 + HYPHEN = 3 + + +class OperationMetadata(object): + class State(enum.IntEnum): + """ + Attributes: + STATE_UNSPECIFIED (int): The default value. This value is used if the state is omitted. + ACCEPTED (int): Request is received. + WAITING (int): Request operation is waiting for scheduling. + RUNNING (int): Request is being processed. 
+ SUCCEEDED (int): The batch processing completed successfully. + CANCELLED (int): The batch processing was cancelled. + FAILED (int): The batch processing has failed. + """ + + STATE_UNSPECIFIED = 0 + ACCEPTED = 1 + WAITING = 2 + RUNNING = 3 + SUCCEEDED = 4 + CANCELLED = 5 + FAILED = 6 diff --git a/documentai/google/cloud/documentai_v1beta1/gapic/transports/__init__.py b/documentai/google/cloud/documentai_v1beta1/gapic/transports/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/videointelligence/google/cloud/videointelligence_v1beta1/gapic/transports/video_intelligence_service_grpc_transport.py b/documentai/google/cloud/documentai_v1beta1/gapic/transports/document_understanding_service_grpc_transport.py similarity index 80% rename from videointelligence/google/cloud/videointelligence_v1beta1/gapic/transports/video_intelligence_service_grpc_transport.py rename to documentai/google/cloud/documentai_v1beta1/gapic/transports/document_understanding_service_grpc_transport.py index 9eaba1a970b3..7d2f13919a3e 100644 --- a/videointelligence/google/cloud/videointelligence_v1beta1/gapic/transports/video_intelligence_service_grpc_transport.py +++ b/documentai/google/cloud/documentai_v1beta1/gapic/transports/document_understanding_service_grpc_transport.py @@ -18,12 +18,12 @@ import google.api_core.grpc_helpers import google.api_core.operations_v1 -from google.cloud.videointelligence_v1beta1.proto import video_intelligence_pb2_grpc +from google.cloud.documentai_v1beta1.proto import document_understanding_pb2_grpc -class VideoIntelligenceServiceGrpcTransport(object): +class DocumentUnderstandingServiceGrpcTransport(object): """gRPC transport class providing stubs for - google.cloud.videointelligence.v1beta1 VideoIntelligenceService API. + google.cloud.documentai.v1beta1 DocumentUnderstandingService API. 
The transport provides access to the raw gRPC stubs, which can be used to take advantage of advanced @@ -35,10 +35,7 @@ class VideoIntelligenceServiceGrpcTransport(object): _OAUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) def __init__( - self, - channel=None, - credentials=None, - address="videointelligence.googleapis.com:443", + self, channel=None, credentials=None, address="documentai.googleapis.com:443" ): """Instantiate the transport class. @@ -76,7 +73,7 @@ def __init__( # gRPC uses objects called "stubs" that are bound to the # channel and provide a basic method for each RPC. self._stubs = { - "video_intelligence_service_stub": video_intelligence_pb2_grpc.VideoIntelligenceServiceStub( + "document_understanding_service_stub": document_understanding_pb2_grpc.DocumentUnderstandingServiceStub( channel ) } @@ -90,7 +87,7 @@ def __init__( @classmethod def create_channel( - cls, address="videointelligence.googleapis.com:443", credentials=None, **kwargs + cls, address="documentai.googleapis.com:443", credentials=None, **kwargs ): """Create and return a gRPC channel object. @@ -121,17 +118,14 @@ def channel(self): return self._channel @property - def annotate_video(self): - """Return the gRPC stub for :meth:`VideoIntelligenceServiceClient.annotate_video`. + def batch_process_documents(self): + """Return the gRPC stub for :meth:`DocumentUnderstandingServiceClient.batch_process_documents`. - Performs asynchronous video annotation. Progress and results can be - retrieved through the ``google.longrunning.Operations`` interface. - ``Operation.metadata`` contains ``AnnotateVideoProgress`` (progress). - ``Operation.response`` contains ``AnnotateVideoResponse`` (results). + LRO endpoint to batch process many documents. Returns: Callable: A callable which accepts the appropriate deserialized request object and returns a deserialized response object. 
""" - return self._stubs["video_intelligence_service_stub"].AnnotateVideo + return self._stubs["document_understanding_service_stub"].BatchProcessDocuments diff --git a/documentai/google/cloud/documentai_v1beta1/proto/__init__.py b/documentai/google/cloud/documentai_v1beta1/proto/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/documentai/google/cloud/documentai_v1beta1/proto/document.proto b/documentai/google/cloud/documentai_v1beta1/proto/document.proto new file mode 100644 index 000000000000..1303c32dd2ba --- /dev/null +++ b/documentai/google/cloud/documentai_v1beta1/proto/document.proto @@ -0,0 +1,446 @@ +// Copyright 2019 Google LLC. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +syntax = "proto3"; + +package google.cloud.documentai.v1beta1; + +import "google/api/annotations.proto"; +import "google/cloud/documentai/v1beta1/geometry.proto"; +import "google/rpc/status.proto"; +import "google/type/color.proto"; + +option go_package = "google.golang.org/genproto/googleapis/cloud/documentai/v1beta1;documentai"; +option java_multiple_files = true; +option java_outer_classname = "DocumentProto"; +option java_package = "com.google.cloud.documentai.v1beta1"; + +// Document represents the canonical document resource in Document Understanding +// AI. 
+// It is an interchange format that provides insights into documents and allows +// for collaboration between users and Document Understanding AI to iterate and +// optimize for quality. +message Document { + // For a large document, sharding may be performed to produce several + // document shards. Each document shard contains this field to detail which + // shard it is. + message ShardInfo { + // The 0-based index of this shard. + int64 shard_index = 1; + + // Total number of shards. + int64 shard_count = 2; + + // The index of the first character in + // [Document.text][google.cloud.documentai.v1beta1.Document.text] in the + // overall document global text. + int64 text_offset = 3; + } + + // Annotation for common text style attributes. This adheres to CSS + // conventions as much as possible. + message Style { + // Font size with unit. + message FontSize { + // Font size for the text. + float size = 1; + + // Unit for the font size. Follows CSS naming (in, px, pt, etc.). + string unit = 2; + } + + // Text anchor indexing into the + // [Document.text][google.cloud.documentai.v1beta1.Document.text]. + TextAnchor text_anchor = 1; + + // Text color. + google.type.Color color = 2; + + // Text background color. + google.type.Color background_color = 3; + + // Font weight. Possible values are normal, bold, bolder, and lighter. + // https://www.w3schools.com/cssref/pr_font_weight.asp + string font_weight = 4; + + // Text style. Possible values are normal, italic, and oblique. + // https://www.w3schools.com/cssref/pr_font_font-style.asp + string text_style = 5; + + // Text decoration. Follows CSS standard. + // + // https://www.w3schools.com/cssref/pr_text_text-decoration.asp + string text_decoration = 6; + + // Font size. + FontSize font_size = 7; + } + + // A page in a [Document][google.cloud.documentai.v1beta1.Document]. + message Page { + // Dimension for the page. + message Dimension { + // Page width. + float width = 1; + + // Page height. 
+ float height = 2; + + // Dimension unit. + string unit = 3; + } + + // Visual element describing a layout unit on a page. + message Layout { + // Detected human reading orientation. + enum Orientation { + // Unspecified orientation. + ORIENTATION_UNSPECIFIED = 0; + + // Orientation is aligned with page up. + PAGE_UP = 1; + + // Orientation is aligned with page right. + // Turn the head 90 degrees clockwise from upright to read. + PAGE_RIGHT = 2; + + // Orientation is aligned with page down. + // Turn the head 180 degrees from upright to read. + PAGE_DOWN = 3; + + // Orientation is aligned with page left. + // Turn the head 90 degrees counterclockwise from upright to read. + PAGE_LEFT = 4; + } + + // Text anchor indexing into the + // [Document.text][google.cloud.documentai.v1beta1.Document.text]. + TextAnchor text_anchor = 1; + + // Confidence of the current + // [Layout][google.cloud.documentai.v1beta1.Document.Page.Layout] within + // context of the object this layout is for. e.g. confidence can be for a + // single token, a table, a visual element, etc. depending on context. + // Range [0, 1]. + float confidence = 2; + + // The bounding polygon for the + // [Layout][google.cloud.documentai.v1beta1.Document.Page.Layout]. + BoundingPoly bounding_poly = 3; + + // Detected orientation for the + // [Layout][google.cloud.documentai.v1beta1.Document.Page.Layout]. + Orientation orientation = 4; + } + + // A block has a set of lines (collected into paragraphs) that have a + // common line-spacing and orientation. + message Block { + // [Layout][google.cloud.documentai.v1beta1.Document.Page.Layout] for + // [Block][google.cloud.documentai.v1beta1.Document.Page.Block]. + Layout layout = 1; + + // A list of detected languages together with confidence. + repeated DetectedLanguage detected_languages = 2; + } + + // A collection of lines that a human would perceive as a paragraph. 
+ message Paragraph { + // [Layout][google.cloud.documentai.v1beta1.Document.Page.Layout] for + // [Paragraph][google.cloud.documentai.v1beta1.Document.Page.Paragraph]. + Layout layout = 1; + + // A list of detected languages together with confidence. + repeated DetectedLanguage detected_languages = 2; + } + + // A collection of tokens that a human would perceive as a line. + // Does not cross column boundaries, can be horizontal, vertical, etc. + message Line { + // [Layout][google.cloud.documentai.v1beta1.Document.Page.Layout] for + // [Line][google.cloud.documentai.v1beta1.Document.Page.Line]. + Layout layout = 1; + + // A list of detected languages together with confidence. + repeated DetectedLanguage detected_languages = 2; + } + + // A detected token. + message Token { + // Detected break at the end of a + // [Token][google.cloud.documentai.v1beta1.Document.Page.Token]. + message DetectedBreak { + // Enum to denote the type of break found. + enum Type { + // Unspecified break type. + TYPE_UNSPECIFIED = 0; + + // A single whitespace. + SPACE = 1; + + // A wider whitespace. + WIDE_SPACE = 2; + + // A hyphen that indicates that a token has been split across lines. + HYPHEN = 3; + } + + // Detected break type. + Type type = 1; + } + + // [Layout][google.cloud.documentai.v1beta1.Document.Page.Layout] for + // [Token][google.cloud.documentai.v1beta1.Document.Page.Token]. + Layout layout = 1; + + // Detected break at the end of a + // [Token][google.cloud.documentai.v1beta1.Document.Page.Token]. + DetectedBreak detected_break = 2; + + // A list of detected languages together with confidence. + repeated DetectedLanguage detected_languages = 3; + } + + // Detected non-text visual elements e.g. checkbox, signature etc. on the + // page. + message VisualElement { + // [Layout][google.cloud.documentai.v1beta1.Document.Page.Layout] for + // [Token][google.cloud.documentai.v1beta1.Document.Page.Token]. 
+ Layout layout = 1; + + // Type of the + // [VisualElement][google.cloud.documentai.v1beta1.Document.Page.VisualElement]. + string type = 2; + + // A list of detected languages together with confidence. + repeated DetectedLanguage detected_languages = 3; + } + + // A table representation similar to HTML table structure. + message Table { + // A row of table cells. + message TableRow { + // Cells that make up this row. + repeated TableCell cells = 1; + } + + // A cell representation inside the table. + message TableCell { + // [Layout][google.cloud.documentai.v1beta1.Document.Page.Layout] for + // [TableCell][google.cloud.documentai.v1beta1.Document.Page.Table.TableCell]. + Layout layout = 1; + + // How many rows this cell spans. + int32 row_span = 2; + + // How many columns this cell spans. + int32 col_span = 3; + + // A list of detected languages together with confidence. + repeated DetectedLanguage detected_languages = 4; + } + + // [Layout][google.cloud.documentai.v1beta1.Document.Page.Layout] for + // [Table][google.cloud.documentai.v1beta1.Document.Page.Table]. + Layout layout = 1; + + // Header rows of the table. + repeated TableRow header_rows = 2; + + // Body rows of the table. + repeated TableRow body_rows = 3; + + // A list of detected languages together with confidence. + repeated DetectedLanguage detected_languages = 4; + } + + // A form field detected on the page. + message FormField { + // [Layout][google.cloud.documentai.v1beta1.Document.Page.Layout] for the + // [FormField][google.cloud.documentai.v1beta1.Document.Page.FormField] + // name. e.g. `Address`, `Email`, `Grand total`, `Phone number`, etc. + Layout field_name = 1; + + // [Layout][google.cloud.documentai.v1beta1.Document.Page.Layout] for the + // [FormField][google.cloud.documentai.v1beta1.Document.Page.FormField] + // value. + Layout field_value = 2; + + // A list of detected languages for name together with confidence. 
+ repeated DetectedLanguage name_detected_languages = 3; + + // A list of detected languages for value together with confidence. + repeated DetectedLanguage value_detected_languages = 4; + } + + // Detected language for a structural component. + message DetectedLanguage { + // The BCP-47 language code, such as "en-US" or "sr-Latn". For more + // information, see + // http://www.unicode.org/reports/tr35/#Unicode_locale_identifier. + string language_code = 1; + + // Confidence of detected language. Range [0, 1]. + float confidence = 2; + } + + // 1-based index for current + // [Page][google.cloud.documentai.v1beta1.Document.Page] in a parent + // [Document][google.cloud.documentai.v1beta1.Document]. Useful when a page + // is taken out of a [Document][google.cloud.documentai.v1beta1.Document] + // for individual processing. + int32 page_number = 1; + + // Physical dimension of the page. + Dimension dimension = 2; + + // [Layout][google.cloud.documentai.v1beta1.Document.Page.Layout] for the + // page. + Layout layout = 3; + + // A list of detected languages together with confidence. + repeated DetectedLanguage detected_languages = 4; + + // A list of visually detected text blocks on the page. + // A block has a set of lines (collected into paragraphs) that have a common + // line-spacing and orientation. + repeated Block blocks = 5; + + // A list of visually detected text paragraphs on the page. + // A collection of lines that a human would perceive as a paragraph. + repeated Paragraph paragraphs = 6; + + // A list of visually detected text lines on the page. + // A collection of tokens that a human would perceive as a line. + repeated Line lines = 7; + + // A list of visually detected tokens on the page. + repeated Token tokens = 8; + + // A list of detected non-text visual elements e.g. checkbox, + // signature etc. on the page. + repeated VisualElement visual_elements = 9; + + // A list of visually detected tables on the page. 
+ repeated Table tables = 10; + + // A list of visually detected form fields on the page. + repeated FormField form_fields = 11; + } + + // A phrase in the text that is a known entity type, such as a person, an + // organization, or location. + message Entity { + // Provenance of the entity. + // Text anchor indexing into the + // [Document.text][google.cloud.documentai.v1beta1.Document.text]. + TextAnchor text_anchor = 1; + + // Entity type from a schema e.g. `Address`. + string type = 2; + + // Text value in the document e.g. `1600 Amphitheatre Pkwy`. + string mention_text = 3; + + // Canonical mention name. This will be a unique value in the entity list + // for this document. + string mention_id = 4; + } + + // Relationship between + // [Entities][google.cloud.documentai.v1beta1.Document.Entity]. + message EntityRelation { + // Subject entity mention_id. + string subject_id = 1; + + // Object entity mention_id. + string object_id = 2; + + // Relationship description. + string relation = 3; + } + + // Text reference indexing into the + // [Document.text][google.cloud.documentai.v1beta1.Document.text]. + message TextAnchor { + // A text segment in the + // [Document.text][google.cloud.documentai.v1beta1.Document.text]. The + // indices may be out of bounds which indicate that the text extends into + // another document shard for large sharded documents. See + // [ShardInfo.text_offset][google.cloud.documentai.v1beta1.Document.ShardInfo.text_offset] + message TextSegment { + // [TextSegment][google.cloud.documentai.v1beta1.Document.TextAnchor.TextSegment] + // start UTF-8 char index in the + // [Document.text][google.cloud.documentai.v1beta1.Document.text]. + int64 start_index = 1; + + // [TextSegment][google.cloud.documentai.v1beta1.Document.TextAnchor.TextSegment] + // half open end UTF-8 char index in the + // [Document.text][google.cloud.documentai.v1beta1.Document.text]. 
+ int64 end_index = 2; + } + + // The text segments from the + // [Document.text][google.cloud.documentai.v1beta1.Document.text]. + repeated TextSegment text_segments = 1; + } + + // Original source document from the user. + oneof source { + // Currently supports Google Cloud Storage URI of the form + // `gs://bucket_name/object_name`. Object versioning is not supported. + // See [Google Cloud Storage Request + // URIs](https://cloud.google.com/storage/docs/reference-uris) for more + // info. + string uri = 1; + + // Inline document content, represented as a stream of bytes. + // Note: As with all `bytes` fields, protobuffers use a pure binary + // representation, whereas JSON representations use base64. + bytes content = 2; + } + + // An IANA published MIME type (also referred to as media type). For more + // information, see + // https://www.iana.org/assignments/media-types/media-types.xhtml. + string mime_type = 3; + + // UTF-8 encoded text in reading order from the document. + string text = 4; + + // Styles for the + // [Document.text][google.cloud.documentai.v1beta1.Document.text]. + repeated Style text_styles = 5; + + // Visual page layout for the + // [Document][google.cloud.documentai.v1beta1.Document]. + repeated Page pages = 6; + + // A list of entities detected on + // [Document.text][google.cloud.documentai.v1beta1.Document.text]. For + // document shards, entities in this list may cross shard boundaries. + repeated Entity entities = 7; + + // Relationship among + // [Document.entities][google.cloud.documentai.v1beta1.Document.entities]. + repeated EntityRelation entity_relations = 8; + + // Information about the sharding if this document is sharded part of a larger + // document. If the document is not sharded, this message is not specified. + ShardInfo shard_info = 9; + + // Any error that occurred while processing this document. 
+ google.rpc.Status error = 10; +} diff --git a/documentai/google/cloud/documentai_v1beta1/proto/document_pb2.py b/documentai/google/cloud/documentai_v1beta1/proto/document_pb2.py new file mode 100644 index 000000000000..73c90d366c5d --- /dev/null +++ b/documentai/google/cloud/documentai_v1beta1/proto/document_pb2.py @@ -0,0 +1,2695 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/cloud/documentai_v1beta1/proto/document.proto + +import sys + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.cloud.documentai_v1beta1.proto import ( + geometry_pb2 as google_dot_cloud_dot_documentai__v1beta1_dot_proto_dot_geometry__pb2, +) +from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2 +from google.type import color_pb2 as google_dot_type_dot_color__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name="google/cloud/documentai_v1beta1/proto/document.proto", + package="google.cloud.documentai.v1beta1", + syntax="proto3", + serialized_options=_b( + "\n#com.google.cloud.documentai.v1beta1B\rDocumentProtoP\001ZIgoogle.golang.org/genproto/googleapis/cloud/documentai/v1beta1;documentai" + ), + serialized_pb=_b( + '\n4google/cloud/documentai_v1beta1/proto/document.proto\x12\x1fgoogle.cloud.documentai.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x34google/cloud/documentai_v1beta1/proto/geometry.proto\x1a\x17google/rpc/status.proto\x1a\x17google/type/color.proto"\xfa$\n\x08\x44ocument\x12\r\n\x03uri\x18\x01 \x01(\tH\x00\x12\x11\n\x07\x63ontent\x18\x02 
\x01(\x0cH\x00\x12\x11\n\tmime_type\x18\x03 \x01(\t\x12\x0c\n\x04text\x18\x04 \x01(\t\x12\x44\n\x0btext_styles\x18\x05 \x03(\x0b\x32/.google.cloud.documentai.v1beta1.Document.Style\x12=\n\x05pages\x18\x06 \x03(\x0b\x32..google.cloud.documentai.v1beta1.Document.Page\x12\x42\n\x08\x65ntities\x18\x07 \x03(\x0b\x32\x30.google.cloud.documentai.v1beta1.Document.Entity\x12R\n\x10\x65ntity_relations\x18\x08 \x03(\x0b\x32\x38.google.cloud.documentai.v1beta1.Document.EntityRelation\x12G\n\nshard_info\x18\t \x01(\x0b\x32\x33.google.cloud.documentai.v1beta1.Document.ShardInfo\x12!\n\x05\x65rror\x18\n \x01(\x0b\x32\x12.google.rpc.Status\x1aJ\n\tShardInfo\x12\x13\n\x0bshard_index\x18\x01 \x01(\x03\x12\x13\n\x0bshard_count\x18\x02 \x01(\x03\x12\x13\n\x0btext_offset\x18\x03 \x01(\x03\x1a\xda\x02\n\x05Style\x12I\n\x0btext_anchor\x18\x01 \x01(\x0b\x32\x34.google.cloud.documentai.v1beta1.Document.TextAnchor\x12!\n\x05\x63olor\x18\x02 \x01(\x0b\x32\x12.google.type.Color\x12,\n\x10\x62\x61\x63kground_color\x18\x03 \x01(\x0b\x32\x12.google.type.Color\x12\x13\n\x0b\x66ont_weight\x18\x04 \x01(\t\x12\x12\n\ntext_style\x18\x05 \x01(\t\x12\x17\n\x0ftext_decoration\x18\x06 \x01(\t\x12K\n\tfont_size\x18\x07 \x01(\x0b\x32\x38.google.cloud.documentai.v1beta1.Document.Style.FontSize\x1a&\n\x08\x46ontSize\x12\x0c\n\x04size\x18\x01 \x01(\x02\x12\x0c\n\x04unit\x18\x02 \x01(\t\x1a\xf6\x1a\n\x04Page\x12\x13\n\x0bpage_number\x18\x01 \x01(\x05\x12K\n\tdimension\x18\x02 \x01(\x0b\x32\x38.google.cloud.documentai.v1beta1.Document.Page.Dimension\x12\x45\n\x06layout\x18\x03 \x01(\x0b\x32\x35.google.cloud.documentai.v1beta1.Document.Page.Layout\x12[\n\x12\x64\x65tected_languages\x18\x04 \x03(\x0b\x32?.google.cloud.documentai.v1beta1.Document.Page.DetectedLanguage\x12\x44\n\x06\x62locks\x18\x05 \x03(\x0b\x32\x34.google.cloud.documentai.v1beta1.Document.Page.Block\x12L\n\nparagraphs\x18\x06 \x03(\x0b\x32\x38.google.cloud.documentai.v1beta1.Document.Page.Paragraph\x12\x42\n\x05lines\x18\x07 
\x03(\x0b\x32\x33.google.cloud.documentai.v1beta1.Document.Page.Line\x12\x44\n\x06tokens\x18\x08 \x03(\x0b\x32\x34.google.cloud.documentai.v1beta1.Document.Page.Token\x12U\n\x0fvisual_elements\x18\t \x03(\x0b\x32<.google.cloud.documentai.v1beta1.Document.Page.VisualElement\x12\x44\n\x06tables\x18\n \x03(\x0b\x32\x34.google.cloud.documentai.v1beta1.Document.Page.Table\x12M\n\x0b\x66orm_fields\x18\x0b \x03(\x0b\x32\x38.google.cloud.documentai.v1beta1.Document.Page.FormField\x1a\x38\n\tDimension\x12\r\n\x05width\x18\x01 \x01(\x02\x12\x0e\n\x06height\x18\x02 \x01(\x02\x12\x0c\n\x04unit\x18\x03 \x01(\t\x1a\xec\x02\n\x06Layout\x12I\n\x0btext_anchor\x18\x01 \x01(\x0b\x32\x34.google.cloud.documentai.v1beta1.Document.TextAnchor\x12\x12\n\nconfidence\x18\x02 \x01(\x02\x12\x44\n\rbounding_poly\x18\x03 \x01(\x0b\x32-.google.cloud.documentai.v1beta1.BoundingPoly\x12V\n\x0borientation\x18\x04 \x01(\x0e\x32\x41.google.cloud.documentai.v1beta1.Document.Page.Layout.Orientation"e\n\x0bOrientation\x12\x1b\n\x17ORIENTATION_UNSPECIFIED\x10\x00\x12\x0b\n\x07PAGE_UP\x10\x01\x12\x0e\n\nPAGE_RIGHT\x10\x02\x12\r\n\tPAGE_DOWN\x10\x03\x12\r\n\tPAGE_LEFT\x10\x04\x1a\xab\x01\n\x05\x42lock\x12\x45\n\x06layout\x18\x01 \x01(\x0b\x32\x35.google.cloud.documentai.v1beta1.Document.Page.Layout\x12[\n\x12\x64\x65tected_languages\x18\x02 \x03(\x0b\x32?.google.cloud.documentai.v1beta1.Document.Page.DetectedLanguage\x1a\xaf\x01\n\tParagraph\x12\x45\n\x06layout\x18\x01 \x01(\x0b\x32\x35.google.cloud.documentai.v1beta1.Document.Page.Layout\x12[\n\x12\x64\x65tected_languages\x18\x02 \x03(\x0b\x32?.google.cloud.documentai.v1beta1.Document.Page.DetectedLanguage\x1a\xaa\x01\n\x04Line\x12\x45\n\x06layout\x18\x01 \x01(\x0b\x32\x35.google.cloud.documentai.v1beta1.Document.Page.Layout\x12[\n\x12\x64\x65tected_languages\x18\x02 \x03(\x0b\x32?.google.cloud.documentai.v1beta1.Document.Page.DetectedLanguage\x1a\xb5\x03\n\x05Token\x12\x45\n\x06layout\x18\x01 
\x01(\x0b\x32\x35.google.cloud.documentai.v1beta1.Document.Page.Layout\x12Z\n\x0e\x64\x65tected_break\x18\x02 \x01(\x0b\x32\x42.google.cloud.documentai.v1beta1.Document.Page.Token.DetectedBreak\x12[\n\x12\x64\x65tected_languages\x18\x03 \x03(\x0b\x32?.google.cloud.documentai.v1beta1.Document.Page.DetectedLanguage\x1a\xab\x01\n\rDetectedBreak\x12U\n\x04type\x18\x01 \x01(\x0e\x32G.google.cloud.documentai.v1beta1.Document.Page.Token.DetectedBreak.Type"C\n\x04Type\x12\x14\n\x10TYPE_UNSPECIFIED\x10\x00\x12\t\n\x05SPACE\x10\x01\x12\x0e\n\nWIDE_SPACE\x10\x02\x12\n\n\x06HYPHEN\x10\x03\x1a\xc1\x01\n\rVisualElement\x12\x45\n\x06layout\x18\x01 \x01(\x0b\x32\x35.google.cloud.documentai.v1beta1.Document.Page.Layout\x12\x0c\n\x04type\x18\x02 \x01(\t\x12[\n\x12\x64\x65tected_languages\x18\x03 \x03(\x0b\x32?.google.cloud.documentai.v1beta1.Document.Page.DetectedLanguage\x1a\x82\x05\n\x05Table\x12\x45\n\x06layout\x18\x01 \x01(\x0b\x32\x35.google.cloud.documentai.v1beta1.Document.Page.Layout\x12R\n\x0bheader_rows\x18\x02 \x03(\x0b\x32=.google.cloud.documentai.v1beta1.Document.Page.Table.TableRow\x12P\n\tbody_rows\x18\x03 \x03(\x0b\x32=.google.cloud.documentai.v1beta1.Document.Page.Table.TableRow\x12[\n\x12\x64\x65tected_languages\x18\x04 \x03(\x0b\x32?.google.cloud.documentai.v1beta1.Document.Page.DetectedLanguage\x1aY\n\x08TableRow\x12M\n\x05\x63\x65lls\x18\x01 \x03(\x0b\x32>.google.cloud.documentai.v1beta1.Document.Page.Table.TableCell\x1a\xd3\x01\n\tTableCell\x12\x45\n\x06layout\x18\x01 \x01(\x0b\x32\x35.google.cloud.documentai.v1beta1.Document.Page.Layout\x12\x10\n\x08row_span\x18\x02 \x01(\x05\x12\x10\n\x08\x63ol_span\x18\x03 \x01(\x05\x12[\n\x12\x64\x65tected_languages\x18\x04 \x03(\x0b\x32?.google.cloud.documentai.v1beta1.Document.Page.DetectedLanguage\x1a\xe7\x02\n\tFormField\x12I\n\nfield_name\x18\x01 \x01(\x0b\x32\x35.google.cloud.documentai.v1beta1.Document.Page.Layout\x12J\n\x0b\x66ield_value\x18\x02 
\x01(\x0b\x32\x35.google.cloud.documentai.v1beta1.Document.Page.Layout\x12`\n\x17name_detected_languages\x18\x03 \x03(\x0b\x32?.google.cloud.documentai.v1beta1.Document.Page.DetectedLanguage\x12\x61\n\x18value_detected_languages\x18\x04 \x03(\x0b\x32?.google.cloud.documentai.v1beta1.Document.Page.DetectedLanguage\x1a=\n\x10\x44\x65tectedLanguage\x12\x15\n\rlanguage_code\x18\x01 \x01(\t\x12\x12\n\nconfidence\x18\x02 \x01(\x02\x1a\x8b\x01\n\x06\x45ntity\x12I\n\x0btext_anchor\x18\x01 \x01(\x0b\x32\x34.google.cloud.documentai.v1beta1.Document.TextAnchor\x12\x0c\n\x04type\x18\x02 \x01(\t\x12\x14\n\x0cmention_text\x18\x03 \x01(\t\x12\x12\n\nmention_id\x18\x04 \x01(\t\x1aI\n\x0e\x45ntityRelation\x12\x12\n\nsubject_id\x18\x01 \x01(\t\x12\x11\n\tobject_id\x18\x02 \x01(\t\x12\x10\n\x08relation\x18\x03 \x01(\t\x1a\x9c\x01\n\nTextAnchor\x12W\n\rtext_segments\x18\x01 \x03(\x0b\x32@.google.cloud.documentai.v1beta1.Document.TextAnchor.TextSegment\x1a\x35\n\x0bTextSegment\x12\x13\n\x0bstart_index\x18\x01 \x01(\x03\x12\x11\n\tend_index\x18\x02 \x01(\x03\x42\x08\n\x06sourceB\x81\x01\n#com.google.cloud.documentai.v1beta1B\rDocumentProtoP\x01ZIgoogle.golang.org/genproto/googleapis/cloud/documentai/v1beta1;documentaib\x06proto3' + ), + dependencies=[ + google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_cloud_dot_documentai__v1beta1_dot_proto_dot_geometry__pb2.DESCRIPTOR, + google_dot_rpc_dot_status__pb2.DESCRIPTOR, + google_dot_type_dot_color__pb2.DESCRIPTOR, + ], +) + + +_DOCUMENT_PAGE_LAYOUT_ORIENTATION = _descriptor.EnumDescriptor( + name="Orientation", + full_name="google.cloud.documentai.v1beta1.Document.Page.Layout.Orientation", + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name="ORIENTATION_UNSPECIFIED", + index=0, + number=0, + serialized_options=None, + type=None, + ), + _descriptor.EnumValueDescriptor( + name="PAGE_UP", index=1, number=1, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + 
name="PAGE_RIGHT", index=2, number=2, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="PAGE_DOWN", index=3, number=3, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="PAGE_LEFT", index=4, number=4, serialized_options=None, type=None + ), + ], + containing_type=None, + serialized_options=None, + serialized_start=2236, + serialized_end=2337, +) +_sym_db.RegisterEnumDescriptor(_DOCUMENT_PAGE_LAYOUT_ORIENTATION) + +_DOCUMENT_PAGE_TOKEN_DETECTEDBREAK_TYPE = _descriptor.EnumDescriptor( + name="Type", + full_name="google.cloud.documentai.v1beta1.Document.Page.Token.DetectedBreak.Type", + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name="TYPE_UNSPECIFIED", + index=0, + number=0, + serialized_options=None, + type=None, + ), + _descriptor.EnumValueDescriptor( + name="SPACE", index=1, number=1, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="WIDE_SPACE", index=2, number=2, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="HYPHEN", index=3, number=3, serialized_options=None, type=None + ), + ], + containing_type=None, + serialized_options=None, + serialized_start=3235, + serialized_end=3302, +) +_sym_db.RegisterEnumDescriptor(_DOCUMENT_PAGE_TOKEN_DETECTEDBREAK_TYPE) + + +_DOCUMENT_SHARDINFO = _descriptor.Descriptor( + name="ShardInfo", + full_name="google.cloud.documentai.v1beta1.Document.ShardInfo", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="shard_index", + full_name="google.cloud.documentai.v1beta1.Document.ShardInfo.shard_index", + index=0, + number=1, + type=3, + cpp_type=2, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + 
name="shard_count", + full_name="google.cloud.documentai.v1beta1.Document.ShardInfo.shard_count", + index=1, + number=2, + type=3, + cpp_type=2, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="text_offset", + full_name="google.cloud.documentai.v1beta1.Document.ShardInfo.text_offset", + index=2, + number=3, + type=3, + cpp_type=2, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=696, + serialized_end=770, +) + +_DOCUMENT_STYLE_FONTSIZE = _descriptor.Descriptor( + name="FontSize", + full_name="google.cloud.documentai.v1beta1.Document.Style.FontSize", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="size", + full_name="google.cloud.documentai.v1beta1.Document.Style.FontSize.size", + index=0, + number=1, + type=2, + cpp_type=6, + label=1, + has_default_value=False, + default_value=float(0), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="unit", + full_name="google.cloud.documentai.v1beta1.Document.Style.FontSize.unit", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + 
extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1081, + serialized_end=1119, +) + +_DOCUMENT_STYLE = _descriptor.Descriptor( + name="Style", + full_name="google.cloud.documentai.v1beta1.Document.Style", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="text_anchor", + full_name="google.cloud.documentai.v1beta1.Document.Style.text_anchor", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="color", + full_name="google.cloud.documentai.v1beta1.Document.Style.color", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="background_color", + full_name="google.cloud.documentai.v1beta1.Document.Style.background_color", + index=2, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="font_weight", + full_name="google.cloud.documentai.v1beta1.Document.Style.font_weight", + index=3, + number=4, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), 
+ _descriptor.FieldDescriptor( + name="text_style", + full_name="google.cloud.documentai.v1beta1.Document.Style.text_style", + index=4, + number=5, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="text_decoration", + full_name="google.cloud.documentai.v1beta1.Document.Style.text_decoration", + index=5, + number=6, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="font_size", + full_name="google.cloud.documentai.v1beta1.Document.Style.font_size", + index=6, + number=7, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[_DOCUMENT_STYLE_FONTSIZE], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=773, + serialized_end=1119, +) + +_DOCUMENT_PAGE_DIMENSION = _descriptor.Descriptor( + name="Dimension", + full_name="google.cloud.documentai.v1beta1.Document.Page.Dimension", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="width", + full_name="google.cloud.documentai.v1beta1.Document.Page.Dimension.width", + index=0, + number=1, + type=2, + cpp_type=6, + label=1, + has_default_value=False, + default_value=float(0), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + 
extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="height", + full_name="google.cloud.documentai.v1beta1.Document.Page.Dimension.height", + index=1, + number=2, + type=2, + cpp_type=6, + label=1, + has_default_value=False, + default_value=float(0), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="unit", + full_name="google.cloud.documentai.v1beta1.Document.Page.Dimension.unit", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1914, + serialized_end=1970, +) + +_DOCUMENT_PAGE_LAYOUT = _descriptor.Descriptor( + name="Layout", + full_name="google.cloud.documentai.v1beta1.Document.Page.Layout", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="text_anchor", + full_name="google.cloud.documentai.v1beta1.Document.Page.Layout.text_anchor", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="confidence", + full_name="google.cloud.documentai.v1beta1.Document.Page.Layout.confidence", + index=1, + number=2, + type=2, + cpp_type=6, + label=1, + has_default_value=False, + default_value=float(0), + message_type=None, + enum_type=None, + 
containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="bounding_poly", + full_name="google.cloud.documentai.v1beta1.Document.Page.Layout.bounding_poly", + index=2, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="orientation", + full_name="google.cloud.documentai.v1beta1.Document.Page.Layout.orientation", + index=3, + number=4, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[_DOCUMENT_PAGE_LAYOUT_ORIENTATION], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1973, + serialized_end=2337, +) + +_DOCUMENT_PAGE_BLOCK = _descriptor.Descriptor( + name="Block", + full_name="google.cloud.documentai.v1beta1.Document.Page.Block", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="layout", + full_name="google.cloud.documentai.v1beta1.Document.Page.Block.layout", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="detected_languages", + full_name="google.cloud.documentai.v1beta1.Document.Page.Block.detected_languages", + index=1, + number=2, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + 
default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2340, + serialized_end=2511, +) + +_DOCUMENT_PAGE_PARAGRAPH = _descriptor.Descriptor( + name="Paragraph", + full_name="google.cloud.documentai.v1beta1.Document.Page.Paragraph", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="layout", + full_name="google.cloud.documentai.v1beta1.Document.Page.Paragraph.layout", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="detected_languages", + full_name="google.cloud.documentai.v1beta1.Document.Page.Paragraph.detected_languages", + index=1, + number=2, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2514, + serialized_end=2689, +) + +_DOCUMENT_PAGE_LINE = _descriptor.Descriptor( + name="Line", + full_name="google.cloud.documentai.v1beta1.Document.Page.Line", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="layout", + full_name="google.cloud.documentai.v1beta1.Document.Page.Line.layout", + index=0, + number=1, + type=11, + 
cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="detected_languages", + full_name="google.cloud.documentai.v1beta1.Document.Page.Line.detected_languages", + index=1, + number=2, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2692, + serialized_end=2862, +) + +_DOCUMENT_PAGE_TOKEN_DETECTEDBREAK = _descriptor.Descriptor( + name="DetectedBreak", + full_name="google.cloud.documentai.v1beta1.Document.Page.Token.DetectedBreak", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="type", + full_name="google.cloud.documentai.v1beta1.Document.Page.Token.DetectedBreak.type", + index=0, + number=1, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[_DOCUMENT_PAGE_TOKEN_DETECTEDBREAK_TYPE], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=3131, + serialized_end=3302, +) + +_DOCUMENT_PAGE_TOKEN = _descriptor.Descriptor( + name="Token", + full_name="google.cloud.documentai.v1beta1.Document.Page.Token", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="layout", 
+ full_name="google.cloud.documentai.v1beta1.Document.Page.Token.layout", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="detected_break", + full_name="google.cloud.documentai.v1beta1.Document.Page.Token.detected_break", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="detected_languages", + full_name="google.cloud.documentai.v1beta1.Document.Page.Token.detected_languages", + index=2, + number=3, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[_DOCUMENT_PAGE_TOKEN_DETECTEDBREAK], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2865, + serialized_end=3302, +) + +_DOCUMENT_PAGE_VISUALELEMENT = _descriptor.Descriptor( + name="VisualElement", + full_name="google.cloud.documentai.v1beta1.Document.Page.VisualElement", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="layout", + full_name="google.cloud.documentai.v1beta1.Document.Page.VisualElement.layout", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + 
serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="type", + full_name="google.cloud.documentai.v1beta1.Document.Page.VisualElement.type", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="detected_languages", + full_name="google.cloud.documentai.v1beta1.Document.Page.VisualElement.detected_languages", + index=2, + number=3, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=3305, + serialized_end=3498, +) + +_DOCUMENT_PAGE_TABLE_TABLEROW = _descriptor.Descriptor( + name="TableRow", + full_name="google.cloud.documentai.v1beta1.Document.Page.Table.TableRow", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="cells", + full_name="google.cloud.documentai.v1beta1.Document.Page.Table.TableRow.cells", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=3840, + serialized_end=3929, +) + +_DOCUMENT_PAGE_TABLE_TABLECELL = _descriptor.Descriptor( + 
name="TableCell", + full_name="google.cloud.documentai.v1beta1.Document.Page.Table.TableCell", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="layout", + full_name="google.cloud.documentai.v1beta1.Document.Page.Table.TableCell.layout", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="row_span", + full_name="google.cloud.documentai.v1beta1.Document.Page.Table.TableCell.row_span", + index=1, + number=2, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="col_span", + full_name="google.cloud.documentai.v1beta1.Document.Page.Table.TableCell.col_span", + index=2, + number=3, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="detected_languages", + full_name="google.cloud.documentai.v1beta1.Document.Page.Table.TableCell.detected_languages", + index=3, + number=4, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=3932, + 
serialized_end=4143, +) + +_DOCUMENT_PAGE_TABLE = _descriptor.Descriptor( + name="Table", + full_name="google.cloud.documentai.v1beta1.Document.Page.Table", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="layout", + full_name="google.cloud.documentai.v1beta1.Document.Page.Table.layout", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="header_rows", + full_name="google.cloud.documentai.v1beta1.Document.Page.Table.header_rows", + index=1, + number=2, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="body_rows", + full_name="google.cloud.documentai.v1beta1.Document.Page.Table.body_rows", + index=2, + number=3, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="detected_languages", + full_name="google.cloud.documentai.v1beta1.Document.Page.Table.detected_languages", + index=3, + number=4, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[_DOCUMENT_PAGE_TABLE_TABLEROW, _DOCUMENT_PAGE_TABLE_TABLECELL], + enum_types=[], + serialized_options=None, + is_extendable=False, + 
syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=3501, + serialized_end=4143, +) + +_DOCUMENT_PAGE_FORMFIELD = _descriptor.Descriptor( + name="FormField", + full_name="google.cloud.documentai.v1beta1.Document.Page.FormField", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="field_name", + full_name="google.cloud.documentai.v1beta1.Document.Page.FormField.field_name", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="field_value", + full_name="google.cloud.documentai.v1beta1.Document.Page.FormField.field_value", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="name_detected_languages", + full_name="google.cloud.documentai.v1beta1.Document.Page.FormField.name_detected_languages", + index=2, + number=3, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value_detected_languages", + full_name="google.cloud.documentai.v1beta1.Document.Page.FormField.value_detected_languages", + index=3, + number=4, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + 
extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=4146, + serialized_end=4505, +) + +_DOCUMENT_PAGE_DETECTEDLANGUAGE = _descriptor.Descriptor( + name="DetectedLanguage", + full_name="google.cloud.documentai.v1beta1.Document.Page.DetectedLanguage", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="language_code", + full_name="google.cloud.documentai.v1beta1.Document.Page.DetectedLanguage.language_code", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="confidence", + full_name="google.cloud.documentai.v1beta1.Document.Page.DetectedLanguage.confidence", + index=1, + number=2, + type=2, + cpp_type=6, + label=1, + has_default_value=False, + default_value=float(0), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=4507, + serialized_end=4568, +) + +_DOCUMENT_PAGE = _descriptor.Descriptor( + name="Page", + full_name="google.cloud.documentai.v1beta1.Document.Page", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="page_number", + full_name="google.cloud.documentai.v1beta1.Document.Page.page_number", + index=0, + number=1, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + 
is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="dimension", + full_name="google.cloud.documentai.v1beta1.Document.Page.dimension", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="layout", + full_name="google.cloud.documentai.v1beta1.Document.Page.layout", + index=2, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="detected_languages", + full_name="google.cloud.documentai.v1beta1.Document.Page.detected_languages", + index=3, + number=4, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="blocks", + full_name="google.cloud.documentai.v1beta1.Document.Page.blocks", + index=4, + number=5, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="paragraphs", + full_name="google.cloud.documentai.v1beta1.Document.Page.paragraphs", + index=5, + number=6, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + 
extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="lines", + full_name="google.cloud.documentai.v1beta1.Document.Page.lines", + index=6, + number=7, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="tokens", + full_name="google.cloud.documentai.v1beta1.Document.Page.tokens", + index=7, + number=8, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="visual_elements", + full_name="google.cloud.documentai.v1beta1.Document.Page.visual_elements", + index=8, + number=9, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="tables", + full_name="google.cloud.documentai.v1beta1.Document.Page.tables", + index=9, + number=10, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="form_fields", + full_name="google.cloud.documentai.v1beta1.Document.Page.form_fields", + index=10, + number=11, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + 
file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[ + _DOCUMENT_PAGE_DIMENSION, + _DOCUMENT_PAGE_LAYOUT, + _DOCUMENT_PAGE_BLOCK, + _DOCUMENT_PAGE_PARAGRAPH, + _DOCUMENT_PAGE_LINE, + _DOCUMENT_PAGE_TOKEN, + _DOCUMENT_PAGE_VISUALELEMENT, + _DOCUMENT_PAGE_TABLE, + _DOCUMENT_PAGE_FORMFIELD, + _DOCUMENT_PAGE_DETECTEDLANGUAGE, + ], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1122, + serialized_end=4568, +) + +_DOCUMENT_ENTITY = _descriptor.Descriptor( + name="Entity", + full_name="google.cloud.documentai.v1beta1.Document.Entity", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="text_anchor", + full_name="google.cloud.documentai.v1beta1.Document.Entity.text_anchor", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="type", + full_name="google.cloud.documentai.v1beta1.Document.Entity.type", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="mention_text", + full_name="google.cloud.documentai.v1beta1.Document.Entity.mention_text", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="mention_id", + 
full_name="google.cloud.documentai.v1beta1.Document.Entity.mention_id", + index=3, + number=4, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=4571, + serialized_end=4710, +) + +_DOCUMENT_ENTITYRELATION = _descriptor.Descriptor( + name="EntityRelation", + full_name="google.cloud.documentai.v1beta1.Document.EntityRelation", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="subject_id", + full_name="google.cloud.documentai.v1beta1.Document.EntityRelation.subject_id", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="object_id", + full_name="google.cloud.documentai.v1beta1.Document.EntityRelation.object_id", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="relation", + full_name="google.cloud.documentai.v1beta1.Document.EntityRelation.relation", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + 
serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=4712, + serialized_end=4785, +) + +_DOCUMENT_TEXTANCHOR_TEXTSEGMENT = _descriptor.Descriptor( + name="TextSegment", + full_name="google.cloud.documentai.v1beta1.Document.TextAnchor.TextSegment", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="start_index", + full_name="google.cloud.documentai.v1beta1.Document.TextAnchor.TextSegment.start_index", + index=0, + number=1, + type=3, + cpp_type=2, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="end_index", + full_name="google.cloud.documentai.v1beta1.Document.TextAnchor.TextSegment.end_index", + index=1, + number=2, + type=3, + cpp_type=2, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=4891, + serialized_end=4944, +) + +_DOCUMENT_TEXTANCHOR = _descriptor.Descriptor( + name="TextAnchor", + full_name="google.cloud.documentai.v1beta1.Document.TextAnchor", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="text_segments", + full_name="google.cloud.documentai.v1beta1.Document.TextAnchor.text_segments", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + 
enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[_DOCUMENT_TEXTANCHOR_TEXTSEGMENT], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=4788, + serialized_end=4944, +) + +_DOCUMENT = _descriptor.Descriptor( + name="Document", + full_name="google.cloud.documentai.v1beta1.Document", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="uri", + full_name="google.cloud.documentai.v1beta1.Document.uri", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="content", + full_name="google.cloud.documentai.v1beta1.Document.content", + index=1, + number=2, + type=12, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b(""), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="mime_type", + full_name="google.cloud.documentai.v1beta1.Document.mime_type", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="text", + full_name="google.cloud.documentai.v1beta1.Document.text", + index=3, + number=4, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + 
enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="text_styles", + full_name="google.cloud.documentai.v1beta1.Document.text_styles", + index=4, + number=5, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="pages", + full_name="google.cloud.documentai.v1beta1.Document.pages", + index=5, + number=6, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="entities", + full_name="google.cloud.documentai.v1beta1.Document.entities", + index=6, + number=7, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="entity_relations", + full_name="google.cloud.documentai.v1beta1.Document.entity_relations", + index=7, + number=8, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="shard_info", + full_name="google.cloud.documentai.v1beta1.Document.shard_info", + index=8, + number=9, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + 
extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="error", + full_name="google.cloud.documentai.v1beta1.Document.error", + index=9, + number=10, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[ + _DOCUMENT_SHARDINFO, + _DOCUMENT_STYLE, + _DOCUMENT_PAGE, + _DOCUMENT_ENTITY, + _DOCUMENT_ENTITYRELATION, + _DOCUMENT_TEXTANCHOR, + ], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="source", + full_name="google.cloud.documentai.v1beta1.Document.source", + index=0, + containing_type=None, + fields=[], + ) + ], + serialized_start=224, + serialized_end=4954, +) + +_DOCUMENT_SHARDINFO.containing_type = _DOCUMENT +_DOCUMENT_STYLE_FONTSIZE.containing_type = _DOCUMENT_STYLE +_DOCUMENT_STYLE.fields_by_name["text_anchor"].message_type = _DOCUMENT_TEXTANCHOR +_DOCUMENT_STYLE.fields_by_name[ + "color" +].message_type = google_dot_type_dot_color__pb2._COLOR +_DOCUMENT_STYLE.fields_by_name[ + "background_color" +].message_type = google_dot_type_dot_color__pb2._COLOR +_DOCUMENT_STYLE.fields_by_name["font_size"].message_type = _DOCUMENT_STYLE_FONTSIZE +_DOCUMENT_STYLE.containing_type = _DOCUMENT +_DOCUMENT_PAGE_DIMENSION.containing_type = _DOCUMENT_PAGE +_DOCUMENT_PAGE_LAYOUT.fields_by_name["text_anchor"].message_type = _DOCUMENT_TEXTANCHOR +_DOCUMENT_PAGE_LAYOUT.fields_by_name[ + "bounding_poly" +].message_type = ( + google_dot_cloud_dot_documentai__v1beta1_dot_proto_dot_geometry__pb2._BOUNDINGPOLY +) +_DOCUMENT_PAGE_LAYOUT.fields_by_name[ + "orientation" +].enum_type = _DOCUMENT_PAGE_LAYOUT_ORIENTATION +_DOCUMENT_PAGE_LAYOUT.containing_type = _DOCUMENT_PAGE 
+_DOCUMENT_PAGE_LAYOUT_ORIENTATION.containing_type = _DOCUMENT_PAGE_LAYOUT +_DOCUMENT_PAGE_BLOCK.fields_by_name["layout"].message_type = _DOCUMENT_PAGE_LAYOUT +_DOCUMENT_PAGE_BLOCK.fields_by_name[ + "detected_languages" +].message_type = _DOCUMENT_PAGE_DETECTEDLANGUAGE +_DOCUMENT_PAGE_BLOCK.containing_type = _DOCUMENT_PAGE +_DOCUMENT_PAGE_PARAGRAPH.fields_by_name["layout"].message_type = _DOCUMENT_PAGE_LAYOUT +_DOCUMENT_PAGE_PARAGRAPH.fields_by_name[ + "detected_languages" +].message_type = _DOCUMENT_PAGE_DETECTEDLANGUAGE +_DOCUMENT_PAGE_PARAGRAPH.containing_type = _DOCUMENT_PAGE +_DOCUMENT_PAGE_LINE.fields_by_name["layout"].message_type = _DOCUMENT_PAGE_LAYOUT +_DOCUMENT_PAGE_LINE.fields_by_name[ + "detected_languages" +].message_type = _DOCUMENT_PAGE_DETECTEDLANGUAGE +_DOCUMENT_PAGE_LINE.containing_type = _DOCUMENT_PAGE +_DOCUMENT_PAGE_TOKEN_DETECTEDBREAK.fields_by_name[ + "type" +].enum_type = _DOCUMENT_PAGE_TOKEN_DETECTEDBREAK_TYPE +_DOCUMENT_PAGE_TOKEN_DETECTEDBREAK.containing_type = _DOCUMENT_PAGE_TOKEN +_DOCUMENT_PAGE_TOKEN_DETECTEDBREAK_TYPE.containing_type = ( + _DOCUMENT_PAGE_TOKEN_DETECTEDBREAK +) +_DOCUMENT_PAGE_TOKEN.fields_by_name["layout"].message_type = _DOCUMENT_PAGE_LAYOUT +_DOCUMENT_PAGE_TOKEN.fields_by_name[ + "detected_break" +].message_type = _DOCUMENT_PAGE_TOKEN_DETECTEDBREAK +_DOCUMENT_PAGE_TOKEN.fields_by_name[ + "detected_languages" +].message_type = _DOCUMENT_PAGE_DETECTEDLANGUAGE +_DOCUMENT_PAGE_TOKEN.containing_type = _DOCUMENT_PAGE +_DOCUMENT_PAGE_VISUALELEMENT.fields_by_name[ + "layout" +].message_type = _DOCUMENT_PAGE_LAYOUT +_DOCUMENT_PAGE_VISUALELEMENT.fields_by_name[ + "detected_languages" +].message_type = _DOCUMENT_PAGE_DETECTEDLANGUAGE +_DOCUMENT_PAGE_VISUALELEMENT.containing_type = _DOCUMENT_PAGE +_DOCUMENT_PAGE_TABLE_TABLEROW.fields_by_name[ + "cells" +].message_type = _DOCUMENT_PAGE_TABLE_TABLECELL +_DOCUMENT_PAGE_TABLE_TABLEROW.containing_type = _DOCUMENT_PAGE_TABLE +_DOCUMENT_PAGE_TABLE_TABLECELL.fields_by_name[ + "layout" 
+].message_type = _DOCUMENT_PAGE_LAYOUT +_DOCUMENT_PAGE_TABLE_TABLECELL.fields_by_name[ + "detected_languages" +].message_type = _DOCUMENT_PAGE_DETECTEDLANGUAGE +_DOCUMENT_PAGE_TABLE_TABLECELL.containing_type = _DOCUMENT_PAGE_TABLE +_DOCUMENT_PAGE_TABLE.fields_by_name["layout"].message_type = _DOCUMENT_PAGE_LAYOUT +_DOCUMENT_PAGE_TABLE.fields_by_name[ + "header_rows" +].message_type = _DOCUMENT_PAGE_TABLE_TABLEROW +_DOCUMENT_PAGE_TABLE.fields_by_name[ + "body_rows" +].message_type = _DOCUMENT_PAGE_TABLE_TABLEROW +_DOCUMENT_PAGE_TABLE.fields_by_name[ + "detected_languages" +].message_type = _DOCUMENT_PAGE_DETECTEDLANGUAGE +_DOCUMENT_PAGE_TABLE.containing_type = _DOCUMENT_PAGE +_DOCUMENT_PAGE_FORMFIELD.fields_by_name[ + "field_name" +].message_type = _DOCUMENT_PAGE_LAYOUT +_DOCUMENT_PAGE_FORMFIELD.fields_by_name[ + "field_value" +].message_type = _DOCUMENT_PAGE_LAYOUT +_DOCUMENT_PAGE_FORMFIELD.fields_by_name[ + "name_detected_languages" +].message_type = _DOCUMENT_PAGE_DETECTEDLANGUAGE +_DOCUMENT_PAGE_FORMFIELD.fields_by_name[ + "value_detected_languages" +].message_type = _DOCUMENT_PAGE_DETECTEDLANGUAGE +_DOCUMENT_PAGE_FORMFIELD.containing_type = _DOCUMENT_PAGE +_DOCUMENT_PAGE_DETECTEDLANGUAGE.containing_type = _DOCUMENT_PAGE +_DOCUMENT_PAGE.fields_by_name["dimension"].message_type = _DOCUMENT_PAGE_DIMENSION +_DOCUMENT_PAGE.fields_by_name["layout"].message_type = _DOCUMENT_PAGE_LAYOUT +_DOCUMENT_PAGE.fields_by_name[ + "detected_languages" +].message_type = _DOCUMENT_PAGE_DETECTEDLANGUAGE +_DOCUMENT_PAGE.fields_by_name["blocks"].message_type = _DOCUMENT_PAGE_BLOCK +_DOCUMENT_PAGE.fields_by_name["paragraphs"].message_type = _DOCUMENT_PAGE_PARAGRAPH +_DOCUMENT_PAGE.fields_by_name["lines"].message_type = _DOCUMENT_PAGE_LINE +_DOCUMENT_PAGE.fields_by_name["tokens"].message_type = _DOCUMENT_PAGE_TOKEN +_DOCUMENT_PAGE.fields_by_name[ + "visual_elements" +].message_type = _DOCUMENT_PAGE_VISUALELEMENT +_DOCUMENT_PAGE.fields_by_name["tables"].message_type = 
_DOCUMENT_PAGE_TABLE +_DOCUMENT_PAGE.fields_by_name["form_fields"].message_type = _DOCUMENT_PAGE_FORMFIELD +_DOCUMENT_PAGE.containing_type = _DOCUMENT +_DOCUMENT_ENTITY.fields_by_name["text_anchor"].message_type = _DOCUMENT_TEXTANCHOR +_DOCUMENT_ENTITY.containing_type = _DOCUMENT +_DOCUMENT_ENTITYRELATION.containing_type = _DOCUMENT +_DOCUMENT_TEXTANCHOR_TEXTSEGMENT.containing_type = _DOCUMENT_TEXTANCHOR +_DOCUMENT_TEXTANCHOR.fields_by_name[ + "text_segments" +].message_type = _DOCUMENT_TEXTANCHOR_TEXTSEGMENT +_DOCUMENT_TEXTANCHOR.containing_type = _DOCUMENT +_DOCUMENT.fields_by_name["text_styles"].message_type = _DOCUMENT_STYLE +_DOCUMENT.fields_by_name["pages"].message_type = _DOCUMENT_PAGE +_DOCUMENT.fields_by_name["entities"].message_type = _DOCUMENT_ENTITY +_DOCUMENT.fields_by_name["entity_relations"].message_type = _DOCUMENT_ENTITYRELATION +_DOCUMENT.fields_by_name["shard_info"].message_type = _DOCUMENT_SHARDINFO +_DOCUMENT.fields_by_name["error"].message_type = google_dot_rpc_dot_status__pb2._STATUS +_DOCUMENT.oneofs_by_name["source"].fields.append(_DOCUMENT.fields_by_name["uri"]) +_DOCUMENT.fields_by_name["uri"].containing_oneof = _DOCUMENT.oneofs_by_name["source"] +_DOCUMENT.oneofs_by_name["source"].fields.append(_DOCUMENT.fields_by_name["content"]) +_DOCUMENT.fields_by_name["content"].containing_oneof = _DOCUMENT.oneofs_by_name[ + "source" +] +DESCRIPTOR.message_types_by_name["Document"] = _DOCUMENT +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +Document = _reflection.GeneratedProtocolMessageType( + "Document", + (_message.Message,), + dict( + ShardInfo=_reflection.GeneratedProtocolMessageType( + "ShardInfo", + (_message.Message,), + dict( + DESCRIPTOR=_DOCUMENT_SHARDINFO, + __module__="google.cloud.documentai_v1beta1.proto.document_pb2", + __doc__="""For a large document, sharding may be performed to produce several + document shards. Each document shard contains this field to detail which + shard it is. 
+ + + Attributes: + shard_index: + The 0-based index of this shard. + shard_count: + Total number of shards. + text_offset: + The index of the first character in + [Document.text][google.cloud.documentai.v1beta1.Document.text] + in the overall document global text. + """, + # @@protoc_insertion_point(class_scope:google.cloud.documentai.v1beta1.Document.ShardInfo) + ), + ), + Style=_reflection.GeneratedProtocolMessageType( + "Style", + (_message.Message,), + dict( + FontSize=_reflection.GeneratedProtocolMessageType( + "FontSize", + (_message.Message,), + dict( + DESCRIPTOR=_DOCUMENT_STYLE_FONTSIZE, + __module__="google.cloud.documentai_v1beta1.proto.document_pb2", + __doc__="""Font size with unit. + + + Attributes: + size: + Font size for the text. + unit: + Unit for the font size. Follows CSS naming (in, px, pt, etc.). + """, + # @@protoc_insertion_point(class_scope:google.cloud.documentai.v1beta1.Document.Style.FontSize) + ), + ), + DESCRIPTOR=_DOCUMENT_STYLE, + __module__="google.cloud.documentai_v1beta1.proto.document_pb2", + __doc__="""Annotation for common text style attributes. This adheres to CSS + conventions as much as possible. + + + Attributes: + text_anchor: + Text anchor indexing into the [Document.text][google.cloud.doc + umentai.v1beta1.Document.text]. + color: + Text color. + background_color: + Text background color. + font_weight: + Font weight. Possible values are normal, bold, bolder, and + lighter. https://www.w3schools.com/cssref/pr\_font\_weight.asp + text_style: + Text style. Possible values are normal, italic, and oblique. + https://www.w3schools.com/cssref/pr\_font\_font-style.asp + text_decoration: + Text decoration. Follows CSS standard. + https://www.w3schools.com/cssref/pr\_text\_text-decoration.asp + font_size: + Font size. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.documentai.v1beta1.Document.Style) + ), + ), + Page=_reflection.GeneratedProtocolMessageType( + "Page", + (_message.Message,), + dict( + Dimension=_reflection.GeneratedProtocolMessageType( + "Dimension", + (_message.Message,), + dict( + DESCRIPTOR=_DOCUMENT_PAGE_DIMENSION, + __module__="google.cloud.documentai_v1beta1.proto.document_pb2", + __doc__="""Dimension for the page. + + + Attributes: + width: + Page width. + height: + Page height. + unit: + Dimension unit. + """, + # @@protoc_insertion_point(class_scope:google.cloud.documentai.v1beta1.Document.Page.Dimension) + ), + ), + Layout=_reflection.GeneratedProtocolMessageType( + "Layout", + (_message.Message,), + dict( + DESCRIPTOR=_DOCUMENT_PAGE_LAYOUT, + __module__="google.cloud.documentai_v1beta1.proto.document_pb2", + __doc__="""Visual element describing a layout unit on a page. + + + Attributes: + text_anchor: + Text anchor indexing into the [Document.text][google.cloud.doc + umentai.v1beta1.Document.text]. + confidence: + Confidence of the current + [Layout][google.cloud.documentai.v1beta1.Document.Page.Layout] + within context of the object this layout is for. e.g. + confidence can be for a single token, a table, a visual + element, etc. depending on context. Range [0, 1]. + bounding_poly: + The bounding polygon for the [Layout][google.cloud.documentai. + v1beta1.Document.Page.Layout]. + orientation: + Detected orientation for the [Layout][google.cloud.documentai. + v1beta1.Document.Page.Layout]. + """, + # @@protoc_insertion_point(class_scope:google.cloud.documentai.v1beta1.Document.Page.Layout) + ), + ), + Block=_reflection.GeneratedProtocolMessageType( + "Block", + (_message.Message,), + dict( + DESCRIPTOR=_DOCUMENT_PAGE_BLOCK, + __module__="google.cloud.documentai_v1beta1.proto.document_pb2", + __doc__="""A block has a set of lines (collected into paragraphs) that have a + common line-spacing and orientation. 
+ + + Attributes: + layout: + [Layout][google.cloud.documentai.v1beta1.Document.Page.Layout] + for + [Block][google.cloud.documentai.v1beta1.Document.Page.Block]. + detected_languages: + A list of detected languages together with confidence. + """, + # @@protoc_insertion_point(class_scope:google.cloud.documentai.v1beta1.Document.Page.Block) + ), + ), + Paragraph=_reflection.GeneratedProtocolMessageType( + "Paragraph", + (_message.Message,), + dict( + DESCRIPTOR=_DOCUMENT_PAGE_PARAGRAPH, + __module__="google.cloud.documentai_v1beta1.proto.document_pb2", + __doc__="""A collection of lines that a human would perceive as a paragraph. + + + Attributes: + layout: + [Layout][google.cloud.documentai.v1beta1.Document.Page.Layout] + for [Paragraph][google.cloud.documentai.v1beta1.Document.Page. + Paragraph]. + detected_languages: + A list of detected languages together with confidence. + """, + # @@protoc_insertion_point(class_scope:google.cloud.documentai.v1beta1.Document.Page.Paragraph) + ), + ), + Line=_reflection.GeneratedProtocolMessageType( + "Line", + (_message.Message,), + dict( + DESCRIPTOR=_DOCUMENT_PAGE_LINE, + __module__="google.cloud.documentai_v1beta1.proto.document_pb2", + __doc__="""A collection of tokens that a human would perceive as a line. Does not + cross column boundaries, can be horizontal, vertical, etc. + + + Attributes: + layout: + [Layout][google.cloud.documentai.v1beta1.Document.Page.Layout] + for + [Line][google.cloud.documentai.v1beta1.Document.Page.Line]. + detected_languages: + A list of detected languages together with confidence. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.documentai.v1beta1.Document.Page.Line) + ), + ), + Token=_reflection.GeneratedProtocolMessageType( + "Token", + (_message.Message,), + dict( + DetectedBreak=_reflection.GeneratedProtocolMessageType( + "DetectedBreak", + (_message.Message,), + dict( + DESCRIPTOR=_DOCUMENT_PAGE_TOKEN_DETECTEDBREAK, + __module__="google.cloud.documentai_v1beta1.proto.document_pb2", + __doc__="""Detected break at the end of a + [Token][google.cloud.documentai.v1beta1.Document.Page.Token]. + + + Attributes: + type: + Detected break type. + """, + # @@protoc_insertion_point(class_scope:google.cloud.documentai.v1beta1.Document.Page.Token.DetectedBreak) + ), + ), + DESCRIPTOR=_DOCUMENT_PAGE_TOKEN, + __module__="google.cloud.documentai_v1beta1.proto.document_pb2", + __doc__="""A detected token. + + + Attributes: + layout: + [Layout][google.cloud.documentai.v1beta1.Document.Page.Layout] + for + [Token][google.cloud.documentai.v1beta1.Document.Page.Token]. + detected_break: + Detected break at the end of a + [Token][google.cloud.documentai.v1beta1.Document.Page.Token]. + detected_languages: + A list of detected languages together with confidence. + """, + # @@protoc_insertion_point(class_scope:google.cloud.documentai.v1beta1.Document.Page.Token) + ), + ), + VisualElement=_reflection.GeneratedProtocolMessageType( + "VisualElement", + (_message.Message,), + dict( + DESCRIPTOR=_DOCUMENT_PAGE_VISUALELEMENT, + __module__="google.cloud.documentai_v1beta1.proto.document_pb2", + __doc__="""Detected non-text visual elements e.g. checkbox, signature etc. on the + page. + + + Attributes: + layout: + [Layout][google.cloud.documentai.v1beta1.Document.Page.Layout] + for + [Token][google.cloud.documentai.v1beta1.Document.Page.Token]. + type: + Type of the [VisualElement][google.cloud.documentai.v1beta1.Do + cument.Page.VisualElement]. + detected_languages: + A list of detected languages together with confidence. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.documentai.v1beta1.Document.Page.VisualElement) + ), + ), + Table=_reflection.GeneratedProtocolMessageType( + "Table", + (_message.Message,), + dict( + TableRow=_reflection.GeneratedProtocolMessageType( + "TableRow", + (_message.Message,), + dict( + DESCRIPTOR=_DOCUMENT_PAGE_TABLE_TABLEROW, + __module__="google.cloud.documentai_v1beta1.proto.document_pb2", + __doc__="""A row of table cells. + + + Attributes: + cells: + Cells that make up this row. + """, + # @@protoc_insertion_point(class_scope:google.cloud.documentai.v1beta1.Document.Page.Table.TableRow) + ), + ), + TableCell=_reflection.GeneratedProtocolMessageType( + "TableCell", + (_message.Message,), + dict( + DESCRIPTOR=_DOCUMENT_PAGE_TABLE_TABLECELL, + __module__="google.cloud.documentai_v1beta1.proto.document_pb2", + __doc__="""A cell representation inside the table. + + + Attributes: + layout: + [Layout][google.cloud.documentai.v1beta1.Document.Page.Layout] + for [TableCell][google.cloud.documentai.v1beta1.Document.Page. + Table.TableCell]. + row_span: + How many rows this cell spans. + col_span: + How many columns this cell spans. + detected_languages: + A list of detected languages together with confidence. + """, + # @@protoc_insertion_point(class_scope:google.cloud.documentai.v1beta1.Document.Page.Table.TableCell) + ), + ), + DESCRIPTOR=_DOCUMENT_PAGE_TABLE, + __module__="google.cloud.documentai_v1beta1.proto.document_pb2", + __doc__="""A table representation similar to HTML table structure. + + + Attributes: + layout: + [Layout][google.cloud.documentai.v1beta1.Document.Page.Layout] + for + [Table][google.cloud.documentai.v1beta1.Document.Page.Table]. + header_rows: + Header rows of the table. + body_rows: + Body rows of the table. + detected_languages: + A list of detected languages together with confidence. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.documentai.v1beta1.Document.Page.Table) + ), + ), + FormField=_reflection.GeneratedProtocolMessageType( + "FormField", + (_message.Message,), + dict( + DESCRIPTOR=_DOCUMENT_PAGE_FORMFIELD, + __module__="google.cloud.documentai_v1beta1.proto.document_pb2", + __doc__="""A form field detected on the page. + + + Attributes: + field_name: + [Layout][google.cloud.documentai.v1beta1.Document.Page.Layout] + for the [FormField][google.cloud.documentai.v1beta1.Document.P + age.FormField] name. e.g. ``Address``, ``Email``, ``Grand + total``, ``Phone number``, etc. + field_value: + [Layout][google.cloud.documentai.v1beta1.Document.Page.Layout] + for the [FormField][google.cloud.documentai.v1beta1.Document.P + age.FormField] value. + name_detected_languages: + A list of detected languages for name together with + confidence. + value_detected_languages: + A list of detected languages for value together with + confidence. + """, + # @@protoc_insertion_point(class_scope:google.cloud.documentai.v1beta1.Document.Page.FormField) + ), + ), + DetectedLanguage=_reflection.GeneratedProtocolMessageType( + "DetectedLanguage", + (_message.Message,), + dict( + DESCRIPTOR=_DOCUMENT_PAGE_DETECTEDLANGUAGE, + __module__="google.cloud.documentai_v1beta1.proto.document_pb2", + __doc__="""Detected language for a structural component. + + + Attributes: + language_code: + The BCP-47 language code, such as "en-US" or "sr-Latn". For + more information, see http://www.unicode.org/reports/tr35/#Uni + code\_locale\_identifier. + confidence: + Confidence of detected language. Range [0, 1]. + """, + # @@protoc_insertion_point(class_scope:google.cloud.documentai.v1beta1.Document.Page.DetectedLanguage) + ), + ), + DESCRIPTOR=_DOCUMENT_PAGE, + __module__="google.cloud.documentai_v1beta1.proto.document_pb2", + __doc__="""A page in a [Document][google.cloud.documentai.v1beta1.Document]. 
+ + + Attributes: + page_number: + 1-based index for current + [Page][google.cloud.documentai.v1beta1.Document.Page] in a + parent [Document][google.cloud.documentai.v1beta1.Document]. + Useful when a page is taken out of a + [Document][google.cloud.documentai.v1beta1.Document] for + individual processing. + dimension: + Physical dimension of the page. + layout: + [Layout][google.cloud.documentai.v1beta1.Document.Page.Layout] + for the page. + detected_languages: + A list of detected languages together with confidence. + blocks: + A list of visually detected text blocks on the page. A block + has a set of lines (collected into paragraphs) that have a + common line-spacing and orientation. + paragraphs: + A list of visually detected text paragraphs on the page. A + collection of lines that a human would perceive as a + paragraph. + lines: + A list of visually detected text lines on the page. A + collection of tokens that a human would perceive as a line. + tokens: + A list of visually detected tokens on the page. + visual_elements: + A list of detected non-text visual elements e.g. checkbox, + signature etc. on the page. + tables: + A list of visually detected tables on the page. + form_fields: + A list of visually detected form fields on the page. + """, + # @@protoc_insertion_point(class_scope:google.cloud.documentai.v1beta1.Document.Page) + ), + ), + Entity=_reflection.GeneratedProtocolMessageType( + "Entity", + (_message.Message,), + dict( + DESCRIPTOR=_DOCUMENT_ENTITY, + __module__="google.cloud.documentai_v1beta1.proto.document_pb2", + __doc__="""A phrase in the text that is a known entity type, such as a person, an + organization, or location. + + + Attributes: + text_anchor: + Provenance of the entity. Text anchor indexing into the [Docum + ent.text][google.cloud.documentai.v1beta1.Document.text]. + type: + Entity type from a schema e.g. ``Address``. + mention_text: + Text value in the document e.g. ``1600 Amphitheatre Pkwy``. 
+ mention_id: + Canonical mention name. This will be a unique value in the + entity list for this document. + """, + # @@protoc_insertion_point(class_scope:google.cloud.documentai.v1beta1.Document.Entity) + ), + ), + EntityRelation=_reflection.GeneratedProtocolMessageType( + "EntityRelation", + (_message.Message,), + dict( + DESCRIPTOR=_DOCUMENT_ENTITYRELATION, + __module__="google.cloud.documentai_v1beta1.proto.document_pb2", + __doc__="""Relationship between + [Entities][google.cloud.documentai.v1beta1.Document.Entity]. + + + Attributes: + subject_id: + Subject entity mention\_id. + object_id: + Object entity mention\_id. + relation: + Relationship description. + """, + # @@protoc_insertion_point(class_scope:google.cloud.documentai.v1beta1.Document.EntityRelation) + ), + ), + TextAnchor=_reflection.GeneratedProtocolMessageType( + "TextAnchor", + (_message.Message,), + dict( + TextSegment=_reflection.GeneratedProtocolMessageType( + "TextSegment", + (_message.Message,), + dict( + DESCRIPTOR=_DOCUMENT_TEXTANCHOR_TEXTSEGMENT, + __module__="google.cloud.documentai_v1beta1.proto.document_pb2", + __doc__="""A text segment in the + [Document.text][google.cloud.documentai.v1beta1.Document.text]. The + indices may be out of bounds which indicate that the text extends into + another document shard for large sharded documents. See + [ShardInfo.text\_offset][google.cloud.documentai.v1beta1.Document.ShardInfo.text\_offset] + + + Attributes: + start_index: + [TextSegment][google.cloud.documentai.v1beta1.Document.TextAnc + hor.TextSegment] start UTF-8 char index in the [Document.text] + [google.cloud.documentai.v1beta1.Document.text]. + end_index: + [TextSegment][google.cloud.documentai.v1beta1.Document.TextAnc + hor.TextSegment] half open end UTF-8 char index in the [Docume + nt.text][google.cloud.documentai.v1beta1.Document.text]. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.documentai.v1beta1.Document.TextAnchor.TextSegment) + ), + ), + DESCRIPTOR=_DOCUMENT_TEXTANCHOR, + __module__="google.cloud.documentai_v1beta1.proto.document_pb2", + __doc__="""Text reference indexing into the + [Document.text][google.cloud.documentai.v1beta1.Document.text]. + + + Attributes: + text_segments: + The text segments from the [Document.text][google.cloud.docume + ntai.v1beta1.Document.text]. + """, + # @@protoc_insertion_point(class_scope:google.cloud.documentai.v1beta1.Document.TextAnchor) + ), + ), + DESCRIPTOR=_DOCUMENT, + __module__="google.cloud.documentai_v1beta1.proto.document_pb2", + __doc__="""Document represents the canonical document resource in Document + Understanding AI. It is an interchange format that provides insights + into documents and allows for collaboration between users and Document + Understanding AI to iterate and optimize for quality. + + + Attributes: + source: + Original source document from the user. + uri: + Currently supports Google Cloud Storage URI of the form + ``gs://bucket_name/object_name``. Object versioning is not + supported. See `Google Cloud Storage Request URIs + `__ for + more info. + content: + Inline document content, represented as a stream of bytes. + Note: As with all ``bytes`` fields, protobuffers use a pure + binary representation, whereas JSON representations use + base64. + mime_type: + An IANA published MIME type (also referred to as media type). + For more information, see + https://www.iana.org/assignments/media-types/media- + types.xhtml. + text: + UTF-8 encoded text in reading order from the document. + text_styles: + Styles for the [Document.text][google.cloud.documentai.v1beta1 + .Document.text]. + pages: + Visual page layout for the + [Document][google.cloud.documentai.v1beta1.Document]. + entities: + A list of entities detected on [Document.text][google.cloud.do + cumentai.v1beta1.Document.text]. 
For document shards, entities + in this list may cross shard boundaries. + entity_relations: + Relationship among [Document.entities][google.cloud.documentai + .v1beta1.Document.entities]. + shard_info: + Information about the sharding if this document is sharded + part of a larger document. If the document is not sharded, + this message is not specified. + error: + Any error that occurred while processing this document. + """, + # @@protoc_insertion_point(class_scope:google.cloud.documentai.v1beta1.Document) + ), +) +_sym_db.RegisterMessage(Document) +_sym_db.RegisterMessage(Document.ShardInfo) +_sym_db.RegisterMessage(Document.Style) +_sym_db.RegisterMessage(Document.Style.FontSize) +_sym_db.RegisterMessage(Document.Page) +_sym_db.RegisterMessage(Document.Page.Dimension) +_sym_db.RegisterMessage(Document.Page.Layout) +_sym_db.RegisterMessage(Document.Page.Block) +_sym_db.RegisterMessage(Document.Page.Paragraph) +_sym_db.RegisterMessage(Document.Page.Line) +_sym_db.RegisterMessage(Document.Page.Token) +_sym_db.RegisterMessage(Document.Page.Token.DetectedBreak) +_sym_db.RegisterMessage(Document.Page.VisualElement) +_sym_db.RegisterMessage(Document.Page.Table) +_sym_db.RegisterMessage(Document.Page.Table.TableRow) +_sym_db.RegisterMessage(Document.Page.Table.TableCell) +_sym_db.RegisterMessage(Document.Page.FormField) +_sym_db.RegisterMessage(Document.Page.DetectedLanguage) +_sym_db.RegisterMessage(Document.Entity) +_sym_db.RegisterMessage(Document.EntityRelation) +_sym_db.RegisterMessage(Document.TextAnchor) +_sym_db.RegisterMessage(Document.TextAnchor.TextSegment) + + +DESCRIPTOR._options = None +# @@protoc_insertion_point(module_scope) diff --git a/documentai/google/cloud/documentai_v1beta1/proto/document_pb2_grpc.py b/documentai/google/cloud/documentai_v1beta1/proto/document_pb2_grpc.py new file mode 100644 index 000000000000..07cb78fe03a9 --- /dev/null +++ b/documentai/google/cloud/documentai_v1beta1/proto/document_pb2_grpc.py @@ -0,0 +1,2 @@ +# Generated by 
the gRPC Python protocol compiler plugin. DO NOT EDIT! +import grpc diff --git a/documentai/google/cloud/documentai_v1beta1/proto/document_understanding.proto b/documentai/google/cloud/documentai_v1beta1/proto/document_understanding.proto new file mode 100644 index 000000000000..4f8dfb722986 --- /dev/null +++ b/documentai/google/cloud/documentai_v1beta1/proto/document_understanding.proto @@ -0,0 +1,299 @@ +// Copyright 2019 Google LLC. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +syntax = "proto3"; + +package google.cloud.documentai.v1beta1; + +import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/cloud/documentai/v1beta1/geometry.proto"; +import "google/longrunning/operations.proto"; +import "google/protobuf/timestamp.proto"; + +option go_package = "google.golang.org/genproto/googleapis/cloud/documentai/v1beta1;documentai"; +option java_multiple_files = true; +option java_outer_classname = "DocumentAiProto"; +option java_package = "com.google.cloud.documentai.v1beta1"; + +// Service to parse structured information from unstructured or semi-structured +// documents using state-of-the-art Google AI such as natural language, +// computer vision, and translation. 
+service DocumentUnderstandingService { + option (google.api.default_host) = "documentai.googleapis.com"; + option (google.api.oauth_scopes) = + "https://www.googleapis.com/auth/cloud-platform"; + + // LRO endpoint to batch process many documents. + rpc BatchProcessDocuments(BatchProcessDocumentsRequest) + returns (google.longrunning.Operation) { + option (google.api.http) = { + post: "/v1beta1/{parent=projects/*/locations/*}/documents:batchProcess" + body: "*" + additional_bindings { + post: "/v1beta1/{parent=projects/*}/documents:batchProcess" + body: "*" + } + }; + option (google.api.method_signature) = "requests"; + option (google.longrunning.operation_info) = { + response_type: "BatchProcessDocumentsResponse" + metadata_type: "OperationMetadata" + }; + } +} + +// Request to batch process documents as an asynchronous operation. +message BatchProcessDocumentsRequest { + // Required. Individual requests for each document. + repeated ProcessDocumentRequest requests = 1 + [(google.api.field_behavior) = REQUIRED]; + + // Target project and location to make a call. + // + // Format: `projects/{project-id}/locations/{location-id}`. + // + // If no location is specified, a region will be chosen automatically. + string parent = 2; +} + +// Request to process one document. +message ProcessDocumentRequest { + // Required. Information about the input file. + InputConfig input_config = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The desired output location. + OutputConfig output_config = 2 [(google.api.field_behavior) = REQUIRED]; + + // Specifies a known document type for deeper structure detection. Valid + // values are currently "general" and "invoice". If not provided, "general"\ + // is used as default. If any other value is given, the request is rejected. + string document_type = 3; + + // Controls table extraction behavior. If not specified, the system will + // decide reasonable defaults. 
+ TableExtractionParams table_extraction_params = 4; + + // Controls form extraction behavior. If not specified, the system will + // decide reasonable defaults. + FormExtractionParams form_extraction_params = 5; + + // Controls entity extraction behavior. If not specified, the system will + // decide reasonable defaults. + EntityExtractionParams entity_extraction_params = 6; + + // Controls OCR behavior. If not specified, the system will decide reasonable + // defaults. + OcrParams ocr_params = 7; +} + +// Response to an batch document processing request. This is returned in +// the LRO Operation after the operation is complete. +message BatchProcessDocumentsResponse { + // Responses for each individual document. + repeated ProcessDocumentResponse responses = 1; +} + +// Response to a single document processing request. +message ProcessDocumentResponse { + // Information about the input file. This is the same as the corresponding + // input config in the request. + InputConfig input_config = 1; + + // The output location of the parsed responses. The responses are written to + // this location as JSON-serialized `Document` objects. + OutputConfig output_config = 2; +} + +// Parameters to control Optical Character Recognition (OCR) behavior. +message OcrParams { + // List of languages to use for OCR. In most cases, an empty value + // yields the best results since it enables automatic language detection. For + // languages based on the Latin alphabet, setting `language_hints` is not + // needed. In rare cases, when the language of the text in the image is known, + // setting a hint will help get better results (although it will be a + // significant hindrance if the hint is wrong). Document processing returns an + // error if one or more of the specified languages is not one of the + // supported languages. + repeated string language_hints = 1; +} + +// Parameters to control table extraction behavior. 
+message TableExtractionParams { + // Whether to enable table extraction. + bool enabled = 1; + + // Optional. Table bounding box hints that can be provided to complex cases + // which our algorithm cannot locate the table(s) in. + repeated TableBoundHint table_bound_hints = 2 + [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Table header hints. The extraction will bias towards producing + // these terms as table headers, which may improve accuracy. + repeated string header_hints = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Model version of the table extraction system. Default is "builtin/stable". + // Specify "builtin/latest" for the latest model. + string model_version = 4; +} + +// A hint for a table bounding box on the page for table parsing. +message TableBoundHint { + // Optional. Page number for multi-paged inputs this hint applies to. If not + // provided, this hint will apply to all pages by default. This value is + // 1-based. + int32 page_number = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Bounding box hint for a table on this page. The coordinates must be + // normalized to [0,1] and the bounding box must be an axis-aligned rectangle. + BoundingPoly bounding_box = 2; +} + +// Parameters to control form extraction behavior. +message FormExtractionParams { + // Whether to enable form extraction. + bool enabled = 1; + + // User can provide pairs of (key text, value type) to improve the parsing + // result. + // + // For example, if a document has a field called "Date" that holds a date + // value and a field called "Amount" that may hold either a currency value + // (e.g., "$500.00") or a simple number value (e.g., "20"), you could use the + // following hints: [ {"key": "Date", value_types: [ "DATE"]}, {"key": + // "Amount", "value_types": [ "PRICE", "NUMBER" ]} ] + // + // If the value type is unknown, but you want to provide hints for the keys, + // you can leave the value_types field blank. e.g. 
{"key": "Date", + // "value_types": []} + repeated KeyValuePairHint key_value_pair_hints = 2; + + // Model version of the form extraction system. Default is + // "builtin/stable". Specify "builtin/latest" for the latest model. + string model_version = 3; +} + +// User-provided hint for key value pair. +message KeyValuePairHint { + // The key text for the hint. + string key = 1; + + // Type of the value. This is case-insensitive, and could be one of: + // ADDRESS, LOCATION, ORGANIZATION, PERSON, PHONE_NUMBER, + // ID, NUMBER, EMAIL, PRICE, TERMS, DATE, NAME. Types not in this list will + // be ignored. + repeated string value_types = 2; +} + +// Parameters to control entity extraction behavior. +message EntityExtractionParams { + // Whether to enable entity extraction. + bool enabled = 1; + + // Model version of the entity extraction. Default is + // "builtin/stable". Specify "builtin/latest" for the latest model. + string model_version = 2; +} + +// The desired input location and metadata. +message InputConfig { + // Required. + oneof source { + // The Google Cloud Storage location to read the input from. This must be a + // single file. + GcsSource gcs_source = 1; + } + + // Required. Mimetype of the input. Current supported mimetypes are + // application/pdf, image/tiff, and image/gif. + string mime_type = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// The desired output location and metadata. +message OutputConfig { + // Required. + oneof destination { + // The Google Cloud Storage location to write the output to. + GcsDestination gcs_destination = 1; + } + + // The max number of pages to include into each output Document shard JSON on + // Google Cloud Storage. + // + // The valid range is [1, 100]. If not specified, the default value is 20. + // + // For example, for one pdf file with 100 pages, 100 parsed pages will be + // produced. 
If `pages_per_shard` = 20, then 5 Document shard JSON files each + // containing 20 parsed pages will be written under the prefix + // [OutputConfig.gcs_destination.uri][] and suffix pages-x-to-y.json where + // x and y are 1-indexed page numbers. + // + // Example GCS outputs with 157 pages and pages_per_shard = 50: + // + // pages-001-to-050.json + // pages-051-to-100.json + // pages-101-to-150.json + // pages-151-to-157.json + int32 pages_per_shard = 2; +} + +// The Google Cloud Storage location where the input file will be read from. +message GcsSource { + string uri = 1 [(google.api.field_behavior) = REQUIRED]; +} + +// The Google Cloud Storage location where the output file will be written to. +message GcsDestination { + string uri = 1 [(google.api.field_behavior) = REQUIRED]; +} + +// Contains metadata for the BatchProcessDocuments operation. +message OperationMetadata { + enum State { + // The default value. This value is used if the state is omitted. + STATE_UNSPECIFIED = 0; + + // Request is received. + ACCEPTED = 1; + + // Request operation is waiting for scheduling. + WAITING = 2; + + // Request is being processed. + RUNNING = 3; + + // The batch processing completed successfully. + SUCCEEDED = 4; + + // The batch processing was cancelled. + CANCELLED = 5; + + // The batch processing has failed. + FAILED = 6; + } + + // The state of the current batch processing. + State state = 1; + + // A message providing more details about the current state of processing. + string state_message = 2; + + // The creation time of the operation. + google.protobuf.Timestamp create_time = 3; + + // The last update time of the operation. 
+ google.protobuf.Timestamp update_time = 4; +} diff --git a/documentai/google/cloud/documentai_v1beta1/proto/document_understanding_pb2.py b/documentai/google/cloud/documentai_v1beta1/proto/document_understanding_pb2.py new file mode 100644 index 000000000000..9c978420b798 --- /dev/null +++ b/documentai/google/cloud/documentai_v1beta1/proto/document_understanding_pb2.py @@ -0,0 +1,1554 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/cloud/documentai_v1beta1/proto/document_understanding.proto + +import sys + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.cloud.documentai_v1beta1.proto import ( + geometry_pb2 as google_dot_cloud_dot_documentai__v1beta1_dot_proto_dot_geometry__pb2, +) +from google.longrunning import ( + operations_pb2 as google_dot_longrunning_dot_operations__pb2, +) +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name="google/cloud/documentai_v1beta1/proto/document_understanding.proto", + package="google.cloud.documentai.v1beta1", + syntax="proto3", + serialized_options=_b( + "\n#com.google.cloud.documentai.v1beta1B\017DocumentAiProtoP\001ZIgoogle.golang.org/genproto/googleapis/cloud/documentai/v1beta1;documentai" + ), + serialized_pb=_b( + 
'\nBgoogle/cloud/documentai_v1beta1/proto/document_understanding.proto\x12\x1fgoogle.cloud.documentai.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x34google/cloud/documentai_v1beta1/proto/geometry.proto\x1a#google/longrunning/operations.proto\x1a\x1fgoogle/protobuf/timestamp.proto"~\n\x1c\x42\x61tchProcessDocumentsRequest\x12N\n\x08requests\x18\x01 \x03(\x0b\x32\x37.google.cloud.documentai.v1beta1.ProcessDocumentRequestB\x03\xe0\x41\x02\x12\x0e\n\x06parent\x18\x02 \x01(\t"\x8e\x04\n\x16ProcessDocumentRequest\x12G\n\x0cinput_config\x18\x01 \x01(\x0b\x32,.google.cloud.documentai.v1beta1.InputConfigB\x03\xe0\x41\x02\x12I\n\routput_config\x18\x02 \x01(\x0b\x32-.google.cloud.documentai.v1beta1.OutputConfigB\x03\xe0\x41\x02\x12\x15\n\rdocument_type\x18\x03 \x01(\t\x12W\n\x17table_extraction_params\x18\x04 \x01(\x0b\x32\x36.google.cloud.documentai.v1beta1.TableExtractionParams\x12U\n\x16\x66orm_extraction_params\x18\x05 \x01(\x0b\x32\x35.google.cloud.documentai.v1beta1.FormExtractionParams\x12Y\n\x18\x65ntity_extraction_params\x18\x06 \x01(\x0b\x32\x37.google.cloud.documentai.v1beta1.EntityExtractionParams\x12>\n\nocr_params\x18\x07 \x01(\x0b\x32*.google.cloud.documentai.v1beta1.OcrParams"l\n\x1d\x42\x61tchProcessDocumentsResponse\x12K\n\tresponses\x18\x01 \x03(\x0b\x32\x38.google.cloud.documentai.v1beta1.ProcessDocumentResponse"\xa3\x01\n\x17ProcessDocumentResponse\x12\x42\n\x0cinput_config\x18\x01 \x01(\x0b\x32,.google.cloud.documentai.v1beta1.InputConfig\x12\x44\n\routput_config\x18\x02 \x01(\x0b\x32-.google.cloud.documentai.v1beta1.OutputConfig"#\n\tOcrParams\x12\x16\n\x0elanguage_hints\x18\x01 \x03(\t"\xab\x01\n\x15TableExtractionParams\x12\x0f\n\x07\x65nabled\x18\x01 \x01(\x08\x12O\n\x11table_bound_hints\x18\x02 \x03(\x0b\x32/.google.cloud.documentai.v1beta1.TableBoundHintB\x03\xe0\x41\x01\x12\x19\n\x0cheader_hints\x18\x03 \x03(\tB\x03\xe0\x41\x01\x12\x15\n\rmodel_version\x18\x04 
\x01(\t"o\n\x0eTableBoundHint\x12\x18\n\x0bpage_number\x18\x01 \x01(\x05\x42\x03\xe0\x41\x01\x12\x43\n\x0c\x62ounding_box\x18\x02 \x01(\x0b\x32-.google.cloud.documentai.v1beta1.BoundingPoly"\x8f\x01\n\x14\x46ormExtractionParams\x12\x0f\n\x07\x65nabled\x18\x01 \x01(\x08\x12O\n\x14key_value_pair_hints\x18\x02 \x03(\x0b\x32\x31.google.cloud.documentai.v1beta1.KeyValuePairHint\x12\x15\n\rmodel_version\x18\x03 \x01(\t"4\n\x10KeyValuePairHint\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x13\n\x0bvalue_types\x18\x02 \x03(\t"@\n\x16\x45ntityExtractionParams\x12\x0f\n\x07\x65nabled\x18\x01 \x01(\x08\x12\x15\n\rmodel_version\x18\x02 \x01(\t"q\n\x0bInputConfig\x12@\n\ngcs_source\x18\x01 \x01(\x0b\x32*.google.cloud.documentai.v1beta1.GcsSourceH\x00\x12\x16\n\tmime_type\x18\x02 \x01(\tB\x03\xe0\x41\x02\x42\x08\n\x06source"\x82\x01\n\x0cOutputConfig\x12J\n\x0fgcs_destination\x18\x01 \x01(\x0b\x32/.google.cloud.documentai.v1beta1.GcsDestinationH\x00\x12\x17\n\x0fpages_per_shard\x18\x02 \x01(\x05\x42\r\n\x0b\x64\x65stination"\x1d\n\tGcsSource\x12\x10\n\x03uri\x18\x01 \x01(\tB\x03\xe0\x41\x02""\n\x0eGcsDestination\x12\x10\n\x03uri\x18\x01 \x01(\tB\x03\xe0\x41\x02"\xc7\x02\n\x11OperationMetadata\x12G\n\x05state\x18\x01 \x01(\x0e\x32\x38.google.cloud.documentai.v1beta1.OperationMetadata.State\x12\x15\n\rstate_message\x18\x02 \x01(\t\x12/\n\x0b\x63reate_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x04 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp"p\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0c\n\x08\x41\x43\x43\x45PTED\x10\x01\x12\x0b\n\x07WAITING\x10\x02\x12\x0b\n\x07RUNNING\x10\x03\x12\r\n\tSUCCEEDED\x10\x04\x12\r\n\tCANCELLED\x10\x05\x12\n\n\x06\x46\x41ILED\x10\x06\x32\xac\x03\n\x1c\x44ocumentUnderstandingService\x12\xbc\x02\n\x15\x42\x61tchProcessDocuments\x12=.google.cloud.documentai.v1beta1.BatchProcessDocumentsRequest\x1a\x1d.google.longrunning.Operation"\xc4\x01\x82\xd3\xe4\x93\x02~"?/v1beta1/{parent=projects/*/locations/*}/documents:batchProcess:\x01*Z8"3/v1beta1/{parent=projects/*}/documents:batchProcess:\x01*\xda\x41\x08requests\xca\x41\x32\n\x1d\x42\x61tchProcessDocumentsResponse\x12\x11OperationMetadata\x1aM\xca\x41\x19\x64ocumentai.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB\x83\x01\n#com.google.cloud.documentai.v1beta1B\x0f\x44ocumentAiProtoP\x01ZIgoogle.golang.org/genproto/googleapis/cloud/documentai/v1beta1;documentaib\x06proto3' + ), + dependencies=[ + google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, + google_dot_cloud_dot_documentai__v1beta1_dot_proto_dot_geometry__pb2.DESCRIPTOR, + google_dot_longrunning_dot_operations__pb2.DESCRIPTOR, + google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, + ], +) + + +_OPERATIONMETADATA_STATE = _descriptor.EnumDescriptor( + name="State", + full_name="google.cloud.documentai.v1beta1.OperationMetadata.State", + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name="STATE_UNSPECIFIED", + index=0, + number=0, + serialized_options=None, + type=None, + ), + _descriptor.EnumValueDescriptor( + name="ACCEPTED", index=1, number=1, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="WAITING", index=2, number=2, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="RUNNING", index=3, number=3, 
serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="SUCCEEDED", index=4, number=4, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="CANCELLED", index=5, number=5, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="FAILED", index=6, number=6, serialized_options=None, type=None + ), + ], + containing_type=None, + serialized_options=None, + serialized_start=2369, + serialized_end=2481, +) +_sym_db.RegisterEnumDescriptor(_OPERATIONMETADATA_STATE) + + +_BATCHPROCESSDOCUMENTSREQUEST = _descriptor.Descriptor( + name="BatchProcessDocumentsRequest", + full_name="google.cloud.documentai.v1beta1.BatchProcessDocumentsRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="requests", + full_name="google.cloud.documentai.v1beta1.BatchProcessDocumentsRequest.requests", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="parent", + full_name="google.cloud.documentai.v1beta1.BatchProcessDocumentsRequest.parent", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=315, + serialized_end=441, +) + + +_PROCESSDOCUMENTREQUEST = _descriptor.Descriptor( + name="ProcessDocumentRequest", + full_name="google.cloud.documentai.v1beta1.ProcessDocumentRequest", + 
filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="input_config", + full_name="google.cloud.documentai.v1beta1.ProcessDocumentRequest.input_config", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="output_config", + full_name="google.cloud.documentai.v1beta1.ProcessDocumentRequest.output_config", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="document_type", + full_name="google.cloud.documentai.v1beta1.ProcessDocumentRequest.document_type", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="table_extraction_params", + full_name="google.cloud.documentai.v1beta1.ProcessDocumentRequest.table_extraction_params", + index=3, + number=4, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="form_extraction_params", + full_name="google.cloud.documentai.v1beta1.ProcessDocumentRequest.form_extraction_params", + index=4, + number=5, + type=11, + cpp_type=10, + label=1, + 
has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="entity_extraction_params", + full_name="google.cloud.documentai.v1beta1.ProcessDocumentRequest.entity_extraction_params", + index=5, + number=6, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="ocr_params", + full_name="google.cloud.documentai.v1beta1.ProcessDocumentRequest.ocr_params", + index=6, + number=7, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=444, + serialized_end=970, +) + + +_BATCHPROCESSDOCUMENTSRESPONSE = _descriptor.Descriptor( + name="BatchProcessDocumentsResponse", + full_name="google.cloud.documentai.v1beta1.BatchProcessDocumentsResponse", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="responses", + full_name="google.cloud.documentai.v1beta1.BatchProcessDocumentsResponse.responses", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + 
is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=972, + serialized_end=1080, +) + + +_PROCESSDOCUMENTRESPONSE = _descriptor.Descriptor( + name="ProcessDocumentResponse", + full_name="google.cloud.documentai.v1beta1.ProcessDocumentResponse", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="input_config", + full_name="google.cloud.documentai.v1beta1.ProcessDocumentResponse.input_config", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="output_config", + full_name="google.cloud.documentai.v1beta1.ProcessDocumentResponse.output_config", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1083, + serialized_end=1246, +) + + +_OCRPARAMS = _descriptor.Descriptor( + name="OcrParams", + full_name="google.cloud.documentai.v1beta1.OcrParams", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="language_hints", + full_name="google.cloud.documentai.v1beta1.OcrParams.language_hints", + index=0, + number=1, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + 
nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1248, + serialized_end=1283, +) + + +_TABLEEXTRACTIONPARAMS = _descriptor.Descriptor( + name="TableExtractionParams", + full_name="google.cloud.documentai.v1beta1.TableExtractionParams", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="enabled", + full_name="google.cloud.documentai.v1beta1.TableExtractionParams.enabled", + index=0, + number=1, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="table_bound_hints", + full_name="google.cloud.documentai.v1beta1.TableExtractionParams.table_bound_hints", + index=1, + number=2, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="header_hints", + full_name="google.cloud.documentai.v1beta1.TableExtractionParams.header_hints", + index=2, + number=3, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="model_version", + full_name="google.cloud.documentai.v1beta1.TableExtractionParams.model_version", + index=3, + number=4, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + 
extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1286, + serialized_end=1457, +) + + +_TABLEBOUNDHINT = _descriptor.Descriptor( + name="TableBoundHint", + full_name="google.cloud.documentai.v1beta1.TableBoundHint", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="page_number", + full_name="google.cloud.documentai.v1beta1.TableBoundHint.page_number", + index=0, + number=1, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="bounding_box", + full_name="google.cloud.documentai.v1beta1.TableBoundHint.bounding_box", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1459, + serialized_end=1570, +) + + +_FORMEXTRACTIONPARAMS = _descriptor.Descriptor( + name="FormExtractionParams", + full_name="google.cloud.documentai.v1beta1.FormExtractionParams", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="enabled", + full_name="google.cloud.documentai.v1beta1.FormExtractionParams.enabled", + index=0, + number=1, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + 
enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="key_value_pair_hints", + full_name="google.cloud.documentai.v1beta1.FormExtractionParams.key_value_pair_hints", + index=1, + number=2, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="model_version", + full_name="google.cloud.documentai.v1beta1.FormExtractionParams.model_version", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1573, + serialized_end=1716, +) + + +_KEYVALUEPAIRHINT = _descriptor.Descriptor( + name="KeyValuePairHint", + full_name="google.cloud.documentai.v1beta1.KeyValuePairHint", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="google.cloud.documentai.v1beta1.KeyValuePairHint.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value_types", + full_name="google.cloud.documentai.v1beta1.KeyValuePairHint.value_types", + index=1, + number=2, + type=9, + cpp_type=9, + label=3, + 
has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1718, + serialized_end=1770, +) + + +_ENTITYEXTRACTIONPARAMS = _descriptor.Descriptor( + name="EntityExtractionParams", + full_name="google.cloud.documentai.v1beta1.EntityExtractionParams", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="enabled", + full_name="google.cloud.documentai.v1beta1.EntityExtractionParams.enabled", + index=0, + number=1, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="model_version", + full_name="google.cloud.documentai.v1beta1.EntityExtractionParams.model_version", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1772, + serialized_end=1836, +) + + +_INPUTCONFIG = _descriptor.Descriptor( + name="InputConfig", + full_name="google.cloud.documentai.v1beta1.InputConfig", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="gcs_source", + full_name="google.cloud.documentai.v1beta1.InputConfig.gcs_source", 
+ index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="mime_type", + full_name="google.cloud.documentai.v1beta1.InputConfig.mime_type", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="source", + full_name="google.cloud.documentai.v1beta1.InputConfig.source", + index=0, + containing_type=None, + fields=[], + ) + ], + serialized_start=1838, + serialized_end=1951, +) + + +_OUTPUTCONFIG = _descriptor.Descriptor( + name="OutputConfig", + full_name="google.cloud.documentai.v1beta1.OutputConfig", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="gcs_destination", + full_name="google.cloud.documentai.v1beta1.OutputConfig.gcs_destination", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="pages_per_shard", + full_name="google.cloud.documentai.v1beta1.OutputConfig.pages_per_shard", + index=1, + number=2, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + 
extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="destination", + full_name="google.cloud.documentai.v1beta1.OutputConfig.destination", + index=0, + containing_type=None, + fields=[], + ) + ], + serialized_start=1954, + serialized_end=2084, +) + + +_GCSSOURCE = _descriptor.Descriptor( + name="GcsSource", + full_name="google.cloud.documentai.v1beta1.GcsSource", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="uri", + full_name="google.cloud.documentai.v1beta1.GcsSource.uri", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2086, + serialized_end=2115, +) + + +_GCSDESTINATION = _descriptor.Descriptor( + name="GcsDestination", + full_name="google.cloud.documentai.v1beta1.GcsDestination", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="uri", + full_name="google.cloud.documentai.v1beta1.GcsDestination.uri", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + 
syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2117, + serialized_end=2151, +) + + +_OPERATIONMETADATA = _descriptor.Descriptor( + name="OperationMetadata", + full_name="google.cloud.documentai.v1beta1.OperationMetadata", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="state", + full_name="google.cloud.documentai.v1beta1.OperationMetadata.state", + index=0, + number=1, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="state_message", + full_name="google.cloud.documentai.v1beta1.OperationMetadata.state_message", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="create_time", + full_name="google.cloud.documentai.v1beta1.OperationMetadata.create_time", + index=2, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="update_time", + full_name="google.cloud.documentai.v1beta1.OperationMetadata.update_time", + index=3, + number=4, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[_OPERATIONMETADATA_STATE], 
+ serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2154, + serialized_end=2481, +) + +_BATCHPROCESSDOCUMENTSREQUEST.fields_by_name[ + "requests" +].message_type = _PROCESSDOCUMENTREQUEST +_PROCESSDOCUMENTREQUEST.fields_by_name["input_config"].message_type = _INPUTCONFIG +_PROCESSDOCUMENTREQUEST.fields_by_name["output_config"].message_type = _OUTPUTCONFIG +_PROCESSDOCUMENTREQUEST.fields_by_name[ + "table_extraction_params" +].message_type = _TABLEEXTRACTIONPARAMS +_PROCESSDOCUMENTREQUEST.fields_by_name[ + "form_extraction_params" +].message_type = _FORMEXTRACTIONPARAMS +_PROCESSDOCUMENTREQUEST.fields_by_name[ + "entity_extraction_params" +].message_type = _ENTITYEXTRACTIONPARAMS +_PROCESSDOCUMENTREQUEST.fields_by_name["ocr_params"].message_type = _OCRPARAMS +_BATCHPROCESSDOCUMENTSRESPONSE.fields_by_name[ + "responses" +].message_type = _PROCESSDOCUMENTRESPONSE +_PROCESSDOCUMENTRESPONSE.fields_by_name["input_config"].message_type = _INPUTCONFIG +_PROCESSDOCUMENTRESPONSE.fields_by_name["output_config"].message_type = _OUTPUTCONFIG +_TABLEEXTRACTIONPARAMS.fields_by_name[ + "table_bound_hints" +].message_type = _TABLEBOUNDHINT +_TABLEBOUNDHINT.fields_by_name[ + "bounding_box" +].message_type = ( + google_dot_cloud_dot_documentai__v1beta1_dot_proto_dot_geometry__pb2._BOUNDINGPOLY +) +_FORMEXTRACTIONPARAMS.fields_by_name[ + "key_value_pair_hints" +].message_type = _KEYVALUEPAIRHINT +_INPUTCONFIG.fields_by_name["gcs_source"].message_type = _GCSSOURCE +_INPUTCONFIG.oneofs_by_name["source"].fields.append( + _INPUTCONFIG.fields_by_name["gcs_source"] +) +_INPUTCONFIG.fields_by_name[ + "gcs_source" +].containing_oneof = _INPUTCONFIG.oneofs_by_name["source"] +_OUTPUTCONFIG.fields_by_name["gcs_destination"].message_type = _GCSDESTINATION +_OUTPUTCONFIG.oneofs_by_name["destination"].fields.append( + _OUTPUTCONFIG.fields_by_name["gcs_destination"] +) +_OUTPUTCONFIG.fields_by_name[ + "gcs_destination" 
+].containing_oneof = _OUTPUTCONFIG.oneofs_by_name["destination"] +_OPERATIONMETADATA.fields_by_name["state"].enum_type = _OPERATIONMETADATA_STATE +_OPERATIONMETADATA.fields_by_name[ + "create_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_OPERATIONMETADATA.fields_by_name[ + "update_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_OPERATIONMETADATA_STATE.containing_type = _OPERATIONMETADATA +DESCRIPTOR.message_types_by_name[ + "BatchProcessDocumentsRequest" +] = _BATCHPROCESSDOCUMENTSREQUEST +DESCRIPTOR.message_types_by_name["ProcessDocumentRequest"] = _PROCESSDOCUMENTREQUEST +DESCRIPTOR.message_types_by_name[ + "BatchProcessDocumentsResponse" +] = _BATCHPROCESSDOCUMENTSRESPONSE +DESCRIPTOR.message_types_by_name["ProcessDocumentResponse"] = _PROCESSDOCUMENTRESPONSE +DESCRIPTOR.message_types_by_name["OcrParams"] = _OCRPARAMS +DESCRIPTOR.message_types_by_name["TableExtractionParams"] = _TABLEEXTRACTIONPARAMS +DESCRIPTOR.message_types_by_name["TableBoundHint"] = _TABLEBOUNDHINT +DESCRIPTOR.message_types_by_name["FormExtractionParams"] = _FORMEXTRACTIONPARAMS +DESCRIPTOR.message_types_by_name["KeyValuePairHint"] = _KEYVALUEPAIRHINT +DESCRIPTOR.message_types_by_name["EntityExtractionParams"] = _ENTITYEXTRACTIONPARAMS +DESCRIPTOR.message_types_by_name["InputConfig"] = _INPUTCONFIG +DESCRIPTOR.message_types_by_name["OutputConfig"] = _OUTPUTCONFIG +DESCRIPTOR.message_types_by_name["GcsSource"] = _GCSSOURCE +DESCRIPTOR.message_types_by_name["GcsDestination"] = _GCSDESTINATION +DESCRIPTOR.message_types_by_name["OperationMetadata"] = _OPERATIONMETADATA +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +BatchProcessDocumentsRequest = _reflection.GeneratedProtocolMessageType( + "BatchProcessDocumentsRequest", + (_message.Message,), + dict( + DESCRIPTOR=_BATCHPROCESSDOCUMENTSREQUEST, + __module__="google.cloud.documentai_v1beta1.proto.document_understanding_pb2", + __doc__="""Request to batch process documents as an 
asynchronous operation. + + + Attributes: + requests: + Required. Individual requests for each document. + parent: + Target project and location to make a call. Format: + ``projects/{project-id}/locations/{location-id}``. If no + location is specified, a region will be chosen automatically. + """, + # @@protoc_insertion_point(class_scope:google.cloud.documentai.v1beta1.BatchProcessDocumentsRequest) + ), +) +_sym_db.RegisterMessage(BatchProcessDocumentsRequest) + +ProcessDocumentRequest = _reflection.GeneratedProtocolMessageType( + "ProcessDocumentRequest", + (_message.Message,), + dict( + DESCRIPTOR=_PROCESSDOCUMENTREQUEST, + __module__="google.cloud.documentai_v1beta1.proto.document_understanding_pb2", + __doc__="""Request to process one document. + + + Attributes: + input_config: + Required. Information about the input file. + output_config: + Required. The desired output location. + document_type: + Specifies a known document type for deeper structure + detection. Valid values are currently "general" and + "invoice". If not provided, "general" is used as default. + If any other value is given, the request is rejected. + table_extraction_params: + Controls table extraction behavior. If not specified, the + system will decide reasonable defaults. + form_extraction_params: + Controls form extraction behavior. If not specified, the + system will decide reasonable defaults. + entity_extraction_params: + Controls entity extraction behavior. If not specified, the + system will decide reasonable defaults. + ocr_params: + Controls OCR behavior. If not specified, the system will + decide reasonable defaults. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.documentai.v1beta1.ProcessDocumentRequest) + ), +) +_sym_db.RegisterMessage(ProcessDocumentRequest) + +BatchProcessDocumentsResponse = _reflection.GeneratedProtocolMessageType( + "BatchProcessDocumentsResponse", + (_message.Message,), + dict( + DESCRIPTOR=_BATCHPROCESSDOCUMENTSRESPONSE, + __module__="google.cloud.documentai_v1beta1.proto.document_understanding_pb2", + __doc__="""Response to an batch document processing request. This is returned in + the LRO Operation after the operation is complete. + + + Attributes: + responses: + Responses for each individual document. + """, + # @@protoc_insertion_point(class_scope:google.cloud.documentai.v1beta1.BatchProcessDocumentsResponse) + ), +) +_sym_db.RegisterMessage(BatchProcessDocumentsResponse) + +ProcessDocumentResponse = _reflection.GeneratedProtocolMessageType( + "ProcessDocumentResponse", + (_message.Message,), + dict( + DESCRIPTOR=_PROCESSDOCUMENTRESPONSE, + __module__="google.cloud.documentai_v1beta1.proto.document_understanding_pb2", + __doc__="""Response to a single document processing request. + + + Attributes: + input_config: + Information about the input file. This is the same as the + corresponding input config in the request. + output_config: + The output location of the parsed responses. The responses are + written to this location as JSON-serialized ``Document`` + objects. + """, + # @@protoc_insertion_point(class_scope:google.cloud.documentai.v1beta1.ProcessDocumentResponse) + ), +) +_sym_db.RegisterMessage(ProcessDocumentResponse) + +OcrParams = _reflection.GeneratedProtocolMessageType( + "OcrParams", + (_message.Message,), + dict( + DESCRIPTOR=_OCRPARAMS, + __module__="google.cloud.documentai_v1beta1.proto.document_understanding_pb2", + __doc__="""Parameters to control Optical Character Recognition (OCR) behavior. + + + Attributes: + language_hints: + List of languages to use for OCR. 
In most cases, an empty + value yields the best results since it enables automatic + language detection. For languages based on the Latin alphabet, + setting ``language_hints`` is not needed. In rare cases, when + the language of the text in the image is known, setting a hint + will help get better results (although it will be a + significant hindrance if the hint is wrong). Document + processing returns an error if one or more of the specified + languages is not one of the supported languages. + """, + # @@protoc_insertion_point(class_scope:google.cloud.documentai.v1beta1.OcrParams) + ), +) +_sym_db.RegisterMessage(OcrParams) + +TableExtractionParams = _reflection.GeneratedProtocolMessageType( + "TableExtractionParams", + (_message.Message,), + dict( + DESCRIPTOR=_TABLEEXTRACTIONPARAMS, + __module__="google.cloud.documentai_v1beta1.proto.document_understanding_pb2", + __doc__="""Parameters to control table extraction behavior. + + + Attributes: + enabled: + Whether to enable table extraction. + table_bound_hints: + Optional. Table bounding box hints that can be provided to + complex cases which our algorithm cannot locate the table(s) + in. + header_hints: + Optional. Table header hints. The extraction will bias towards + producing these terms as table headers, which may improve + accuracy. + model_version: + Model version of the table extraction system. Default is + "builtin/stable". Specify "builtin/latest" for the latest + model. + """, + # @@protoc_insertion_point(class_scope:google.cloud.documentai.v1beta1.TableExtractionParams) + ), +) +_sym_db.RegisterMessage(TableExtractionParams) + +TableBoundHint = _reflection.GeneratedProtocolMessageType( + "TableBoundHint", + (_message.Message,), + dict( + DESCRIPTOR=_TABLEBOUNDHINT, + __module__="google.cloud.documentai_v1beta1.proto.document_understanding_pb2", + __doc__="""A hint for a table bounding box on the page for table parsing. + + + Attributes: + page_number: + Optional. 
Page number for multi-paged inputs this hint applies + to. If not provided, this hint will apply to all pages by + default. This value is 1-based. + bounding_box: + Bounding box hint for a table on this page. The coordinates + must be normalized to [0,1] and the bounding box must be an + axis-aligned rectangle. + """, + # @@protoc_insertion_point(class_scope:google.cloud.documentai.v1beta1.TableBoundHint) + ), +) +_sym_db.RegisterMessage(TableBoundHint) + +FormExtractionParams = _reflection.GeneratedProtocolMessageType( + "FormExtractionParams", + (_message.Message,), + dict( + DESCRIPTOR=_FORMEXTRACTIONPARAMS, + __module__="google.cloud.documentai_v1beta1.proto.document_understanding_pb2", + __doc__="""Parameters to control form extraction behavior. + + + Attributes: + enabled: + Whether to enable form extraction. + key_value_pair_hints: + User can provide pairs of (key text, value type) to improve + the parsing result. For example, if a document has a field + called "Date" that holds a date value and a field called + "Amount" that may hold either a currency value (e.g., + "$500.00") or a simple number value (e.g., "20"), you could + use the following hints: [ {"key": "Date", value\_types: [ + "DATE"]}, {"key": "Amount", "value\_types": [ "PRICE", + "NUMBER" ]} ] If the value type is unknown, but you want to + provide hints for the keys, you can leave the value\_types + field blank. e.g. {"key": "Date", "value\_types": []} + model_version: + Model version of the form extraction system. Default is + "builtin/stable". Specify "builtin/latest" for the latest + model. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.documentai.v1beta1.FormExtractionParams) + ), +) +_sym_db.RegisterMessage(FormExtractionParams) + +KeyValuePairHint = _reflection.GeneratedProtocolMessageType( + "KeyValuePairHint", + (_message.Message,), + dict( + DESCRIPTOR=_KEYVALUEPAIRHINT, + __module__="google.cloud.documentai_v1beta1.proto.document_understanding_pb2", + __doc__="""User-provided hint for key value pair. + + + Attributes: + key: + The key text for the hint. + value_types: + Type of the value. This is case-insensitive, and could be one + of: ADDRESS, LOCATION, ORGANIZATION, PERSON, PHONE\_NUMBER, + ID, NUMBER, EMAIL, PRICE, TERMS, DATE, NAME. Types not in this + list will be ignored. + """, + # @@protoc_insertion_point(class_scope:google.cloud.documentai.v1beta1.KeyValuePairHint) + ), +) +_sym_db.RegisterMessage(KeyValuePairHint) + +EntityExtractionParams = _reflection.GeneratedProtocolMessageType( + "EntityExtractionParams", + (_message.Message,), + dict( + DESCRIPTOR=_ENTITYEXTRACTIONPARAMS, + __module__="google.cloud.documentai_v1beta1.proto.document_understanding_pb2", + __doc__="""Parameters to control entity extraction behavior. + + + Attributes: + enabled: + Whether to enable entity extraction. + model_version: + Model version of the entity extraction. Default is + "builtin/stable". Specify "builtin/latest" for the latest + model. + """, + # @@protoc_insertion_point(class_scope:google.cloud.documentai.v1beta1.EntityExtractionParams) + ), +) +_sym_db.RegisterMessage(EntityExtractionParams) + +InputConfig = _reflection.GeneratedProtocolMessageType( + "InputConfig", + (_message.Message,), + dict( + DESCRIPTOR=_INPUTCONFIG, + __module__="google.cloud.documentai_v1beta1.proto.document_understanding_pb2", + __doc__="""The desired input location and metadata. + + + Attributes: + source: + Required. + gcs_source: + The Google Cloud Storage location to read the input from. This + must be a single file. + mime_type: + Required. 
Mimetype of the input. Current supported mimetypes + are application/pdf, image/tiff, and image/gif. + """, + # @@protoc_insertion_point(class_scope:google.cloud.documentai.v1beta1.InputConfig) + ), +) +_sym_db.RegisterMessage(InputConfig) + +OutputConfig = _reflection.GeneratedProtocolMessageType( + "OutputConfig", + (_message.Message,), + dict( + DESCRIPTOR=_OUTPUTCONFIG, + __module__="google.cloud.documentai_v1beta1.proto.document_understanding_pb2", + __doc__="""The desired output location and metadata. + + + Attributes: + destination: + Required. + gcs_destination: + The Google Cloud Storage location to write the output to. + pages_per_shard: + The max number of pages to include into each output Document + shard JSON on Google Cloud Storage. The valid range is [1, + 100]. If not specified, the default value is 20. For example, + for one pdf file with 100 pages, 100 parsed pages will be + produced. If ``pages_per_shard`` = 20, then 5 Document shard + JSON files each containing 20 parsed pages will be written + under the prefix [OutputConfig.gcs\_destination.uri][] and + suffix pages-x-to-y.json where x and y are 1-indexed page + numbers. Example GCS outputs with 157 pages and + pages\_per\_shard = 50: pages-001-to-050.json + pages-051-to-100.json pages-101-to-150.json + pages-151-to-157.json + """, + # @@protoc_insertion_point(class_scope:google.cloud.documentai.v1beta1.OutputConfig) + ), +) +_sym_db.RegisterMessage(OutputConfig) + +GcsSource = _reflection.GeneratedProtocolMessageType( + "GcsSource", + (_message.Message,), + dict( + DESCRIPTOR=_GCSSOURCE, + __module__="google.cloud.documentai_v1beta1.proto.document_understanding_pb2", + __doc__="""The Google Cloud Storage location where the input file will be read + from. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.documentai.v1beta1.GcsSource) + ), +) +_sym_db.RegisterMessage(GcsSource) + +GcsDestination = _reflection.GeneratedProtocolMessageType( + "GcsDestination", + (_message.Message,), + dict( + DESCRIPTOR=_GCSDESTINATION, + __module__="google.cloud.documentai_v1beta1.proto.document_understanding_pb2", + __doc__="""The Google Cloud Storage location where the output file will be written + to. + """, + # @@protoc_insertion_point(class_scope:google.cloud.documentai.v1beta1.GcsDestination) + ), +) +_sym_db.RegisterMessage(GcsDestination) + +OperationMetadata = _reflection.GeneratedProtocolMessageType( + "OperationMetadata", + (_message.Message,), + dict( + DESCRIPTOR=_OPERATIONMETADATA, + __module__="google.cloud.documentai_v1beta1.proto.document_understanding_pb2", + __doc__="""Contains metadata for the BatchProcessDocuments operation. + + + Attributes: + state: + The state of the current batch processing. + state_message: + A message providing more details about the current state of + processing. + create_time: + The creation time of the operation. + update_time: + The last update time of the operation. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.documentai.v1beta1.OperationMetadata) + ), +) +_sym_db.RegisterMessage(OperationMetadata) + + +DESCRIPTOR._options = None +_BATCHPROCESSDOCUMENTSREQUEST.fields_by_name["requests"]._options = None +_PROCESSDOCUMENTREQUEST.fields_by_name["input_config"]._options = None +_PROCESSDOCUMENTREQUEST.fields_by_name["output_config"]._options = None +_TABLEEXTRACTIONPARAMS.fields_by_name["table_bound_hints"]._options = None +_TABLEEXTRACTIONPARAMS.fields_by_name["header_hints"]._options = None +_TABLEBOUNDHINT.fields_by_name["page_number"]._options = None +_INPUTCONFIG.fields_by_name["mime_type"]._options = None +_GCSSOURCE.fields_by_name["uri"]._options = None +_GCSDESTINATION.fields_by_name["uri"]._options = None + +_DOCUMENTUNDERSTANDINGSERVICE = _descriptor.ServiceDescriptor( + name="DocumentUnderstandingService", + full_name="google.cloud.documentai.v1beta1.DocumentUnderstandingService", + file=DESCRIPTOR, + index=0, + serialized_options=_b( + "\312A\031documentai.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" + ), + serialized_start=2484, + serialized_end=2912, + methods=[ + _descriptor.MethodDescriptor( + name="BatchProcessDocuments", + full_name="google.cloud.documentai.v1beta1.DocumentUnderstandingService.BatchProcessDocuments", + index=0, + containing_service=None, + input_type=_BATCHPROCESSDOCUMENTSREQUEST, + output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, + serialized_options=_b( + '\202\323\344\223\002~"?/v1beta1/{parent=projects/*/locations/*}/documents:batchProcess:\001*Z8"3/v1beta1/{parent=projects/*}/documents:batchProcess:\001*\332A\010requests\312A2\n\035BatchProcessDocumentsResponse\022\021OperationMetadata' + ), + ) + ], +) +_sym_db.RegisterServiceDescriptor(_DOCUMENTUNDERSTANDINGSERVICE) + +DESCRIPTOR.services_by_name[ + "DocumentUnderstandingService" +] = _DOCUMENTUNDERSTANDINGSERVICE + +# @@protoc_insertion_point(module_scope) diff --git 
a/documentai/google/cloud/documentai_v1beta1/proto/document_understanding_pb2_grpc.py b/documentai/google/cloud/documentai_v1beta1/proto/document_understanding_pb2_grpc.py new file mode 100644 index 000000000000..6e6d2308d9af --- /dev/null +++ b/documentai/google/cloud/documentai_v1beta1/proto/document_understanding_pb2_grpc.py @@ -0,0 +1,57 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +import grpc + +from google.cloud.documentai_v1beta1.proto import ( + document_understanding_pb2 as google_dot_cloud_dot_documentai__v1beta1_dot_proto_dot_document__understanding__pb2, +) +from google.longrunning import ( + operations_pb2 as google_dot_longrunning_dot_operations__pb2, +) + + +class DocumentUnderstandingServiceStub(object): + """Service to parse structured information from unstructured or semi-structured + documents using state-of-the-art Google AI such as natural language, + computer vision, and translation. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.BatchProcessDocuments = channel.unary_unary( + "/google.cloud.documentai.v1beta1.DocumentUnderstandingService/BatchProcessDocuments", + request_serializer=google_dot_cloud_dot_documentai__v1beta1_dot_proto_dot_document__understanding__pb2.BatchProcessDocumentsRequest.SerializeToString, + response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, + ) + + +class DocumentUnderstandingServiceServicer(object): + """Service to parse structured information from unstructured or semi-structured + documents using state-of-the-art Google AI such as natural language, + computer vision, and translation. + """ + + def BatchProcessDocuments(self, request, context): + """LRO endpoint to batch process many documents. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + +def add_DocumentUnderstandingServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + "BatchProcessDocuments": grpc.unary_unary_rpc_method_handler( + servicer.BatchProcessDocuments, + request_deserializer=google_dot_cloud_dot_documentai__v1beta1_dot_proto_dot_document__understanding__pb2.BatchProcessDocumentsRequest.FromString, + response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ) + } + generic_handler = grpc.method_handlers_generic_handler( + "google.cloud.documentai.v1beta1.DocumentUnderstandingService", + rpc_method_handlers, + ) + server.add_generic_rpc_handlers((generic_handler,)) diff --git a/documentai/google/cloud/documentai_v1beta1/proto/geometry.proto b/documentai/google/cloud/documentai_v1beta1/proto/geometry.proto new file mode 100644 index 000000000000..9dbe2b78b134 --- /dev/null +++ b/documentai/google/cloud/documentai_v1beta1/proto/geometry.proto @@ -0,0 +1,55 @@ +// Copyright 2019 Google LLC. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + +syntax = "proto3"; + +package google.cloud.documentai.v1beta1; + +import "google/api/annotations.proto"; + +option go_package = "google.golang.org/genproto/googleapis/cloud/documentai/v1beta1;documentai"; +option java_multiple_files = true; +option java_outer_classname = "GeometryProto"; +option java_package = "com.google.cloud.documentai.v1beta1"; + +// A vertex represents a 2D point in the image. +// NOTE: the vertex coordinates are in the same scale as the original image. +message Vertex { + // X coordinate. + int32 x = 1; + + // Y coordinate. + int32 y = 2; +} + +// A vertex represents a 2D point in the image. +// NOTE: the normalized vertex coordinates are relative to the original image +// and range from 0 to 1. +message NormalizedVertex { + // X coordinate. + float x = 1; + + // Y coordinate. + float y = 2; +} + +// A bounding polygon for the detected image annotation. +message BoundingPoly { + // The bounding polygon vertices. + repeated Vertex vertices = 1; + + // The bounding polygon normalized vertices. + repeated NormalizedVertex normalized_vertices = 2; +} diff --git a/documentai/google/cloud/documentai_v1beta1/proto/geometry_pb2.py b/documentai/google/cloud/documentai_v1beta1/proto/geometry_pb2.py new file mode 100644 index 000000000000..2e4217e32a0e --- /dev/null +++ b/documentai/google/cloud/documentai_v1beta1/proto/geometry_pb2.py @@ -0,0 +1,270 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/cloud/documentai_v1beta1/proto/geometry.proto + +import sys + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name="google/cloud/documentai_v1beta1/proto/geometry.proto", + package="google.cloud.documentai.v1beta1", + syntax="proto3", + serialized_options=_b( + "\n#com.google.cloud.documentai.v1beta1B\rGeometryProtoP\001ZIgoogle.golang.org/genproto/googleapis/cloud/documentai/v1beta1;documentai" + ), + serialized_pb=_b( + '\n4google/cloud/documentai_v1beta1/proto/geometry.proto\x12\x1fgoogle.cloud.documentai.v1beta1\x1a\x1cgoogle/api/annotations.proto"\x1e\n\x06Vertex\x12\t\n\x01x\x18\x01 \x01(\x05\x12\t\n\x01y\x18\x02 \x01(\x05"(\n\x10NormalizedVertex\x12\t\n\x01x\x18\x01 \x01(\x02\x12\t\n\x01y\x18\x02 \x01(\x02"\x99\x01\n\x0c\x42oundingPoly\x12\x39\n\x08vertices\x18\x01 \x03(\x0b\x32\'.google.cloud.documentai.v1beta1.Vertex\x12N\n\x13normalized_vertices\x18\x02 \x03(\x0b\x32\x31.google.cloud.documentai.v1beta1.NormalizedVertexB\x81\x01\n#com.google.cloud.documentai.v1beta1B\rGeometryProtoP\x01ZIgoogle.golang.org/genproto/googleapis/cloud/documentai/v1beta1;documentaib\x06proto3' + ), + dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR], +) + + +_VERTEX = _descriptor.Descriptor( + name="Vertex", + full_name="google.cloud.documentai.v1beta1.Vertex", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="x", + full_name="google.cloud.documentai.v1beta1.Vertex.x", + index=0, + number=1, + type=5, + cpp_type=1, + label=1, + 
has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="y", + full_name="google.cloud.documentai.v1beta1.Vertex.y", + index=1, + number=2, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=119, + serialized_end=149, +) + + +_NORMALIZEDVERTEX = _descriptor.Descriptor( + name="NormalizedVertex", + full_name="google.cloud.documentai.v1beta1.NormalizedVertex", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="x", + full_name="google.cloud.documentai.v1beta1.NormalizedVertex.x", + index=0, + number=1, + type=2, + cpp_type=6, + label=1, + has_default_value=False, + default_value=float(0), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="y", + full_name="google.cloud.documentai.v1beta1.NormalizedVertex.y", + index=1, + number=2, + type=2, + cpp_type=6, + label=1, + has_default_value=False, + default_value=float(0), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=151, + serialized_end=191, +) + + +_BOUNDINGPOLY = 
_descriptor.Descriptor( + name="BoundingPoly", + full_name="google.cloud.documentai.v1beta1.BoundingPoly", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="vertices", + full_name="google.cloud.documentai.v1beta1.BoundingPoly.vertices", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="normalized_vertices", + full_name="google.cloud.documentai.v1beta1.BoundingPoly.normalized_vertices", + index=1, + number=2, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=194, + serialized_end=347, +) + +_BOUNDINGPOLY.fields_by_name["vertices"].message_type = _VERTEX +_BOUNDINGPOLY.fields_by_name["normalized_vertices"].message_type = _NORMALIZEDVERTEX +DESCRIPTOR.message_types_by_name["Vertex"] = _VERTEX +DESCRIPTOR.message_types_by_name["NormalizedVertex"] = _NORMALIZEDVERTEX +DESCRIPTOR.message_types_by_name["BoundingPoly"] = _BOUNDINGPOLY +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +Vertex = _reflection.GeneratedProtocolMessageType( + "Vertex", + (_message.Message,), + dict( + DESCRIPTOR=_VERTEX, + __module__="google.cloud.documentai_v1beta1.proto.geometry_pb2", + __doc__="""X coordinate. + + + Attributes: + y: + Y coordinate. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.documentai.v1beta1.Vertex) + ), +) +_sym_db.RegisterMessage(Vertex) + +NormalizedVertex = _reflection.GeneratedProtocolMessageType( + "NormalizedVertex", + (_message.Message,), + dict( + DESCRIPTOR=_NORMALIZEDVERTEX, + __module__="google.cloud.documentai_v1beta1.proto.geometry_pb2", + __doc__="""X coordinate. + + + Attributes: + y: + Y coordinate. + """, + # @@protoc_insertion_point(class_scope:google.cloud.documentai.v1beta1.NormalizedVertex) + ), +) +_sym_db.RegisterMessage(NormalizedVertex) + +BoundingPoly = _reflection.GeneratedProtocolMessageType( + "BoundingPoly", + (_message.Message,), + dict( + DESCRIPTOR=_BOUNDINGPOLY, + __module__="google.cloud.documentai_v1beta1.proto.geometry_pb2", + __doc__="""A bounding polygon for the detected image annotation. + + + Attributes: + vertices: + The bounding polygon vertices. + normalized_vertices: + The bounding polygon normalized vertices. + """, + # @@protoc_insertion_point(class_scope:google.cloud.documentai.v1beta1.BoundingPoly) + ), +) +_sym_db.RegisterMessage(BoundingPoly) + + +DESCRIPTOR._options = None +# @@protoc_insertion_point(module_scope) diff --git a/documentai/google/cloud/documentai_v1beta1/proto/geometry_pb2_grpc.py b/documentai/google/cloud/documentai_v1beta1/proto/geometry_pb2_grpc.py new file mode 100644 index 000000000000..07cb78fe03a9 --- /dev/null +++ b/documentai/google/cloud/documentai_v1beta1/proto/geometry_pb2_grpc.py @@ -0,0 +1,2 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
+import grpc diff --git a/documentai/google/cloud/documentai_v1beta1/types.py b/documentai/google/cloud/documentai_v1beta1/types.py new file mode 100644 index 000000000000..d3a4fe125fe4 --- /dev/null +++ b/documentai/google/cloud/documentai_v1beta1/types.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from __future__ import absolute_import +import sys + +from google.api_core.protobuf_helpers import get_messages + +from google.cloud.documentai_v1beta1.proto import document_pb2 +from google.cloud.documentai_v1beta1.proto import document_understanding_pb2 +from google.cloud.documentai_v1beta1.proto import geometry_pb2 +from google.longrunning import operations_pb2 +from google.protobuf import any_pb2 +from google.protobuf import timestamp_pb2 +from google.protobuf import wrappers_pb2 +from google.rpc import status_pb2 +from google.type import color_pb2 + + +_shared_modules = [ + operations_pb2, + any_pb2, + timestamp_pb2, + wrappers_pb2, + status_pb2, + color_pb2, +] + +_local_modules = [document_pb2, document_understanding_pb2, geometry_pb2] + +names = [] + +for module in _shared_modules: # pragma: NO COVER + for name, message in get_messages(module).items(): + setattr(sys.modules[__name__], name, message) + names.append(name) +for module in _local_modules: + for name, message in get_messages(module).items(): + message.__module__ = "google.cloud.documentai_v1beta1.types" + 
setattr(sys.modules[__name__], name, message) + names.append(name) + + +__all__ = tuple(sorted(names)) diff --git a/documentai/noxfile.py b/documentai/noxfile.py new file mode 100644 index 000000000000..a2eefbb6765f --- /dev/null +++ b/documentai/noxfile.py @@ -0,0 +1,160 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! + +from __future__ import absolute_import +import os +import shutil + +import nox + + +LOCAL_DEPS = (os.path.join("..", "api_core"), os.path.join("..", "core")) +BLACK_VERSION = "black==19.3b0" +BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + +if os.path.exists("samples"): + BLACK_PATHS.append("samples") + + +@nox.session(python="3.7") +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION, *LOCAL_DEPS) + session.run("black", "--check", *BLACK_PATHS) + session.run("flake8", "google", "tests") + + +@nox.session(python="3.6") +def blacken(session): + """Run black. + + Format code to uniform standard. + + This currently uses Python 3.6 due to the automated Kokoro run of synthtool. + That run uses an image that doesn't have 3.6 installed. Before updating this + check the state of the `gcp_ubuntu_config` we use for that Kokoro run. 
+ """ + session.install(BLACK_VERSION) + session.run("black", *BLACK_PATHS) + + +@nox.session(python="3.7") +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def default(session): + # Install all test dependencies, then install this package in-place. + session.install("mock", "pytest", "pytest-cov") + for local_dep in LOCAL_DEPS: + session.install("-e", local_dep) + session.install("-e", ".") + + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + "--cov=google.cloud", + "--cov=tests.unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + ) + + +@nox.session(python=["2.7", "3.5", "3.6", "3.7"]) +def unit(session): + """Run the unit test suite.""" + default(session) + + +@nox.session(python=["2.7", "3.7"]) +def system(session): + """Run the system test suite.""" + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + # Sanity check: Only run tests if the environment variable is set. + if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""): + session.skip("Credentials must be set via environment variable") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + # Use pre-release gRPC for system tests. + session.install("--pre", "grpcio") + + # Install all test dependencies, then install this package into the + # virtualenv's dist-packages. 
+ session.install("mock", "pytest") + for local_dep in LOCAL_DEPS: + session.install("-e", local_dep) + session.install("-e", "../test_utils/") + session.install("-e", ".") + + # Run py.test against the system tests. + if system_test_exists: + session.run("py.test", "--quiet", system_test_path, *session.posargs) + if system_test_folder_exists: + session.run("py.test", "--quiet", system_test_folder_path, *session.posargs) + + +@nox.session(python="3.7") +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python="3.7") +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install("sphinx", "alabaster", "recommonmark") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) diff --git a/documentai/setup.cfg b/documentai/setup.cfg new file mode 100644 index 000000000000..3bd555500e37 --- /dev/null +++ b/documentai/setup.cfg @@ -0,0 +1,3 @@ +# Generated by synthtool. DO NOT EDIT! +[bdist_wheel] +universal = 1 diff --git a/documentai/setup.py b/documentai/setup.py new file mode 100644 index 000000000000..25a7befcff69 --- /dev/null +++ b/documentai/setup.py @@ -0,0 +1,73 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import io +import os + +import setuptools + +name = "google-cloud-documentai" +description = "Cloud Document AI API API client library" +version = "0.1.0" +release_status = "Development Status :: 3 - Alpha" +dependencies = [ + "google-api-core[grpc] >= 1.14.0, < 2.0.0dev", + 'enum34; python_version < "3.4"', +] + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package for package in setuptools.find_packages() if package.startswith("google") +] + +namespaces = ["google"] +if "google.cloud" in packages: + namespaces.append("google.cloud") + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url="https://github.com/GoogleCloudPlatform/google-cloud-python", + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.5", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + namespace_packages=namespaces, + install_requires=dependencies, + 
python_requires=">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*", + include_package_data=True, + zip_safe=False, +) diff --git a/documentai/synth.metadata b/documentai/synth.metadata new file mode 100644 index 000000000000..43598f0cde71 --- /dev/null +++ b/documentai/synth.metadata @@ -0,0 +1,39 @@ +{ + "updateTime": "2019-11-06T22:44:32.981143Z", + "sources": [ + { + "generator": { + "name": "artman", + "version": "0.41.0", + "dockerImage": "googleapis/artman@sha256:75b38a3b073a7b243545f2332463096624c802bb1e56b8cb6f22ba1ecd325fa9" + } + }, + { + "git": { + "name": "googleapis", + "remote": "https://github.com/googleapis/googleapis.git", + "sha": "2275670a746ab2bc03ebba0d914b45320ea15af4", + "internalRef": "278922329" + } + }, + { + "template": { + "name": "python_library", + "origin": "synthtool.gcp", + "version": "2019.10.17" + } + } + ], + "destinations": [ + { + "client": { + "source": "googleapis", + "apiName": "documentai", + "apiVersion": "v1beta1", + "language": "python", + "generator": "gapic", + "config": "google/cloud/documentai/artman_documentai_v1beta1.yaml" + } + } + ] +} \ No newline at end of file diff --git a/documentai/synth.py b/documentai/synth.py new file mode 100644 index 000000000000..9284c3cb676d --- /dev/null +++ b/documentai/synth.py @@ -0,0 +1,53 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""This script is used to synthesize generated parts of this library.""" + +import synthtool as s +import synthtool.gcp as gcp +import logging + +logging.basicConfig(level=logging.DEBUG) + +gapic = gcp.GAPICGenerator() +common = gcp.CommonTemplates() + +# ---------------------------------------------------------------------------- +# Generate document AI GAPIC layer +# ---------------------------------------------------------------------------- +library = gapic.py_library("documentai", "v1beta1", include_protos=True) + +excludes = ["README.rst", "nox.py", "setup.py", "docs/index.rst"] +s.move(library, excludes=excludes) + +# Fix bad docstring with stray pipe characters +s.replace( + "google/cloud/**/document_understanding_pb2.py", + """\| Specifies a known document type for deeper structure + detection\. Valid values are currently "general" and + "invoice"\. If not provided, "general" \| is used as default. + If any other value is given, the request is rejected\.""", + """Specifies a known document type for deeper structure + detection. Valid values are currently "general" and + "invoice". If not provided, "general" is used as default. 
+ If any other value is given, the request is rejected.""", +) + +# ---------------------------------------------------------------------------- +# Add templated files +# ---------------------------------------------------------------------------- +templated_files = common.py_library(unit_cov_level=97, cov_level=100) +s.move(templated_files) + +s.shell.run(["nox", "-s", "blacken"], hide_output=False) diff --git a/videointelligence/tests/unit/gapic/v1beta1/test_video_intelligence_service_client_v1beta1.py b/documentai/tests/unit/gapic/v1beta1/test_document_understanding_service_client_v1beta1.py similarity index 70% rename from videointelligence/tests/unit/gapic/v1beta1/test_video_intelligence_service_client_v1beta1.py rename to documentai/tests/unit/gapic/v1beta1/test_document_understanding_service_client_v1beta1.py index 27926f1cf767..eb075a52248a 100644 --- a/videointelligence/tests/unit/gapic/v1beta1/test_video_intelligence_service_client_v1beta1.py +++ b/documentai/tests/unit/gapic/v1beta1/test_document_understanding_service_client_v1beta1.py @@ -21,9 +21,8 @@ from google.rpc import status_pb2 -from google.cloud import videointelligence_v1beta1 -from google.cloud.videointelligence_v1beta1 import enums -from google.cloud.videointelligence_v1beta1.proto import video_intelligence_pb2 +from google.cloud import documentai_v1beta1 +from google.cloud.documentai_v1beta1.proto import document_understanding_pb2 from google.longrunning import operations_pb2 @@ -63,15 +62,15 @@ class CustomException(Exception): pass -class TestVideoIntelligenceServiceClient(object): - def test_annotate_video(self): +class TestDocumentUnderstandingServiceClient(object): + def test_batch_process_documents(self): # Setup Expected Response expected_response = {} - expected_response = video_intelligence_pb2.AnnotateVideoResponse( + expected_response = document_understanding_pb2.BatchProcessDocumentsResponse( **expected_response ) operation = operations_pb2.Operation( - 
name="operations/test_annotate_video", done=True + name="operations/test_batch_process_documents", done=True ) operation.response.Pack(expected_response) @@ -80,29 +79,27 @@ def test_annotate_video(self): patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel - client = videointelligence_v1beta1.VideoIntelligenceServiceClient() + client = documentai_v1beta1.DocumentUnderstandingServiceClient() # Setup Request - input_uri = "gs://cloud-samples-data/video/cat.mp4" - features_element = enums.Feature.LABEL_DETECTION - features = [features_element] + requests = [] - response = client.annotate_video(input_uri, features) + response = client.batch_process_documents(requests) result = response.result() assert expected_response == result assert len(channel.requests) == 1 - expected_request = video_intelligence_pb2.AnnotateVideoRequest( - input_uri=input_uri, features=features + expected_request = document_understanding_pb2.BatchProcessDocumentsRequest( + requests=requests ) actual_request = channel.requests[0][1] assert expected_request == actual_request - def test_annotate_video_exception(self): + def test_batch_process_documents_exception(self): # Setup Response error = status_pb2.Status() operation = operations_pb2.Operation( - name="operations/test_annotate_video_exception", done=True + name="operations/test_batch_process_documents_exception", done=True ) operation.error.CopyFrom(error) @@ -111,13 +108,11 @@ def test_annotate_video_exception(self): patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel - client = videointelligence_v1beta1.VideoIntelligenceServiceClient() + client = documentai_v1beta1.DocumentUnderstandingServiceClient() # Setup Request - input_uri = "gs://cloud-samples-data/video/cat.mp4" - features_element = enums.Feature.LABEL_DETECTION - features = [features_element] + requests = [] - response = 
client.annotate_video(input_uri, features) + response = client.batch_process_documents(requests) exception = response.exception() assert exception.errors[0] == error diff --git a/error_reporting/CHANGELOG.md b/error_reporting/CHANGELOG.md index 81c20812af9f..aa3924d1f596 100644 --- a/error_reporting/CHANGELOG.md +++ b/error_reporting/CHANGELOG.md @@ -4,6 +4,24 @@ [1]: https://pypi.org/project/google-cloud-error-reporting/#history +## 0.33.0 + +10-22-2019 12:10 PDT + +### New Features +- Add `client_options` to constructor ([#9152](https://github.com/googleapis/google-cloud-python/pull/9152)) + +### Dependencies +- Pin `google-cloud-logging >= 1.14.0, < 2.0.0dev`. ([#9476](https://github.com/googleapis/google-cloud-python/pull/9476)) + +### Documentation +- Remove references to the old authentication credentials. ([#9456](https://github.com/googleapis/google-cloud-python/pull/9456)) +- Fix intersphinx reference to requests. ([#9294](https://github.com/googleapis/google-cloud-python/pull/9294)) +- Remove CI for `gh-pages`, use googleapis.dev for `api_core` refs. ([#9085](https://github.com/googleapis/google-cloud-python/pull/9085)) + +### Internal / Testing Changes +- Harden `test_report_exception` systest by increasing `max_tries`. ([#9396](https://github.com/googleapis/google-cloud-python/pull/9396)) + ## 0.32.1 08-23-2019 10:12 PDT diff --git a/error_reporting/README.rst b/error_reporting/README.rst index 39561dd92cb3..bce80fd49e14 100644 --- a/error_reporting/README.rst +++ b/error_reporting/README.rst @@ -85,7 +85,7 @@ Windows Next Steps ~~~~~~~~~~ -- Read the `Client Library Documentation`_ for Google Cloud Datastore API +- Read the `Client Library Documentation`_ for Google Cloud Datastore API to see other available methods on the client. - Read the `Product documentation`_ to learn more about the product and see How-to Guides. 
diff --git a/error_reporting/docs/conf.py b/error_reporting/docs/conf.py index 97c5250b84c8..db959a13ca88 100644 --- a/error_reporting/docs/conf.py +++ b/error_reporting/docs/conf.py @@ -338,7 +338,7 @@ "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://2.python-requests.org/en/master/", None), + "requests": ("https://requests.kennethreitz.org/en/stable/", None), "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } diff --git a/error_reporting/google/cloud/error_reporting/_gapic.py b/error_reporting/google/cloud/error_reporting/_gapic.py index b1925ff6a491..0c6ec9e60a1a 100644 --- a/error_reporting/google/cloud/error_reporting/_gapic.py +++ b/error_reporting/google/cloud/error_reporting/_gapic.py @@ -29,7 +29,9 @@ def make_report_error_api(client): :returns: An Error Reporting API instance. """ gapic_api = report_errors_service_client.ReportErrorsServiceClient( - credentials=client._credentials, client_info=client._client_info + credentials=client._credentials, + client_info=client._client_info, + client_options=client._client_options, ) return _ErrorReportingGapicApi(gapic_api, client.project) diff --git a/error_reporting/google/cloud/error_reporting/_logging.py b/error_reporting/google/cloud/error_reporting/_logging.py index 5eaa4693c96f..5832cc7e2d24 100644 --- a/error_reporting/google/cloud/error_reporting/_logging.py +++ b/error_reporting/google/cloud/error_reporting/_logging.py @@ -30,12 +30,12 @@ class _ErrorReportingLoggingAPI(object): passed falls back to the default inferred from the environment. 
- :type credentials: :class:`oauth2client.client.OAuth2Credentials` or + :type credentials: :class:`google.auth.credentials.Credentials` or :class:`NoneType` - :param credentials: The OAuth2 Credentials to use for the connection - owned by this client. If not passed (and if no - ``_http`` object is passed), falls back to the default - inferred from the environment. + :param credentials: The authorization credentials to attach to requests. + These credentials identify this application to the service. + If none are specified, the client will attempt to ascertain + the credentials from the environment. :type _http: :class:`~requests.Session` :param _http: (Optional) HTTP object to make requests. Can be any object @@ -54,11 +54,27 @@ class _ErrorReportingLoggingAPI(object): requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own library or partner tool. + + :type client_options: :class:`~google.api_core.client_options.ClientOptions` + or :class:`dict` + :param client_options: (Optional) Client options used to set user options + on the client. API Endpoint should be set through client_options. 
""" - def __init__(self, project, credentials=None, _http=None, client_info=None): + def __init__( + self, + project, + credentials=None, + _http=None, + client_info=None, + client_options=None, + ): self.logging_client = google.cloud.logging.client.Client( - project, credentials, _http=_http, client_info=client_info + project, + credentials, + _http=_http, + client_info=client_info, + client_options=client_options, ) def report_error_event(self, error_report): diff --git a/error_reporting/google/cloud/error_reporting/client.py b/error_reporting/google/cloud/error_reporting/client.py index 4fdae4c69e25..c4cb816ead75 100644 --- a/error_reporting/google/cloud/error_reporting/client.py +++ b/error_reporting/google/cloud/error_reporting/client.py @@ -96,12 +96,12 @@ class Client(ClientWithProject): passed falls back to the default inferred from the environment. - :type credentials: :class:`oauth2client.client.OAuth2Credentials` or + :type credentials: :class:`google.auth.credentials.Credentials` or :class:`NoneType` - :param credentials: The OAuth2 Credentials to use for the connection - owned by this client. If not passed (and if no - ``_http`` object is passed), falls back to the default - inferred from the environment. + :param credentials: The authorization credentials to attach to requests. + These credentials identify this application to the service. + If none are specified, the client will attempt to ascertain + the credentials from the environment. :type _http: :class:`~requests.Session` :param _http: (Optional) HTTP object to make requests. Can be any object @@ -143,6 +143,11 @@ class Client(ClientWithProject): you only need to set this if you're developing your own library or partner tool. + :type client_options: :class:`~google.api_core.client_options.ClientOptions` + or :class:`dict` + :param client_options: (Optional) Client options used to set user options + on the client. API Endpoint should be set through client_options. 
+ :raises: :class:`ValueError` if the project is neither passed in nor set in the environment. """ @@ -158,6 +163,7 @@ def __init__( service=None, version=None, client_info=_CLIENT_INFO, + client_options=None, _use_grpc=None, ): super(Client, self).__init__( @@ -168,6 +174,7 @@ def __init__( self.service = service if service else self.DEFAULT_SERVICE self.version = version self._client_info = client_info + self._client_options = client_options if _use_grpc is None: self._use_grpc = _USE_GRPC @@ -195,7 +202,11 @@ def report_errors_api(self): self._report_errors_api = make_report_error_api(self) else: self._report_errors_api = _ErrorReportingLoggingAPI( - self.project, self._credentials, self._http, self._client_info + self.project, + self._credentials, + self._http, + self._client_info, + self._client_options, ) return self._report_errors_api diff --git a/error_reporting/setup.py b/error_reporting/setup.py index da91c2707903..80a0350ad79a 100644 --- a/error_reporting/setup.py +++ b/error_reporting/setup.py @@ -22,13 +22,13 @@ name = "google-cloud-error-reporting" description = "Stackdriver Error Reporting API client library" -version = "0.32.1" +version = "0.33.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' # 'Development Status :: 5 - Production/Stable' release_status = "Development Status :: 3 - Alpha" -dependencies = ["google-cloud-logging>=1.11.0, <2.0dev"] +dependencies = ["google-cloud-logging>=1.14.0, <2.0dev"] extras = {} diff --git a/error_reporting/tests/system/test_system.py b/error_reporting/tests/system/test_system.py index b72223e2ae0a..cf454aecdbd1 100644 --- a/error_reporting/tests/system/test_system.py +++ b/error_reporting/tests/system/test_system.py @@ -117,7 +117,7 @@ def test_report_exception(self): is_one = functools.partial(operator.eq, 1) is_one.__name__ = "is_one" # partial() has no name. 
- retry = RetryResult(is_one, max_tries=6) + retry = RetryResult(is_one, max_tries=8) wrapped_get_count = retry(_get_error_count) error_count = wrapped_get_count(class_name, Config.CLIENT) diff --git a/error_reporting/tests/unit/test__gapic.py b/error_reporting/tests/unit/test__gapic.py index 836f46b82495..00940f466df0 100644 --- a/error_reporting/tests/unit/test__gapic.py +++ b/error_reporting/tests/unit/test__gapic.py @@ -25,7 +25,9 @@ def _call_fut(client): return make_report_error_api(client) def test_make_report_error_api(self): - client = mock.Mock(spec=["project", "_credentials", "_client_info"]) + client = mock.Mock( + spec=["project", "_credentials", "_client_info", "_client_options"] + ) # Call the function being tested. patch = mock.patch( @@ -41,7 +43,9 @@ def test_make_report_error_api(self): self.assertIs(report_error_client._project, client.project) self.assertIs(report_error_client._gapic_api, patched.return_value) patched.assert_called_once_with( - credentials=client._credentials, client_info=client._client_info + credentials=client._credentials, + client_info=client._client_info, + client_options=client._client_options, ) diff --git a/error_reporting/tests/unit/test__logging.py b/error_reporting/tests/unit/test__logging.py index e2b0638b986e..726eaabfec75 100644 --- a/error_reporting/tests/unit/test__logging.py +++ b/error_reporting/tests/unit/test__logging.py @@ -40,7 +40,7 @@ def test_ctor_defaults(self, mocked_cls): self.assertIs(logging_api.logging_client, mocked_cls.return_value) mocked_cls.assert_called_once_with( - self.PROJECT, credentials, _http=None, client_info=None + self.PROJECT, credentials, _http=None, client_info=None, client_options=None ) @mock.patch("google.cloud.logging.client.Client") @@ -48,14 +48,23 @@ def test_ctor_explicit(self, mocked_cls): credentials = _make_credentials() http = mock.Mock() client_info = mock.Mock() + client_options = mock.Mock() logging_api = self._make_one( - self.PROJECT, credentials, _http=http, 
client_info=client_info + self.PROJECT, + credentials, + _http=http, + client_info=client_info, + client_options=client_options, ) self.assertIs(logging_api.logging_client, mocked_cls.return_value) mocked_cls.assert_called_once_with( - self.PROJECT, credentials, _http=http, client_info=client_info + self.PROJECT, + credentials, + _http=http, + client_info=client_info, + client_options=client_options, ) @mock.patch("google.cloud.logging.client.Client") diff --git a/error_reporting/tests/unit/test_client.py b/error_reporting/tests/unit/test_client.py index 5e4dc925a65f..3a7290e8aa96 100644 --- a/error_reporting/tests/unit/test_client.py +++ b/error_reporting/tests/unit/test_client.py @@ -67,16 +67,19 @@ def test_ctor_defaults(self, default_mock): def test_ctor_explicit(self): credentials = _make_credentials() client_info = mock.Mock() + client_options = mock.Mock() client = self._make_one( project=self.PROJECT, credentials=credentials, service=self.SERVICE, version=self.VERSION, client_info=client_info, + client_options=client_options, ) self.assertEqual(client.service, self.SERVICE) self.assertEqual(client.version, self.VERSION) self.assertIs(client._client_info, client_info) + self.assertIs(client._client_options, client_options) def test_report_errors_api_already(self): credentials = _make_credentials() @@ -87,11 +90,13 @@ def test_report_errors_api_already(self): def test_report_errors_api_wo_grpc(self): credentials = _make_credentials() client_info = mock.Mock() + client_options = mock.Mock() http = mock.Mock() client = self._make_one( project=self.PROJECT, credentials=credentials, client_info=client_info, + client_options=client_options, _http=http, _use_grpc=False, ) @@ -103,7 +108,9 @@ def test_report_errors_api_wo_grpc(self): api = client.report_errors_api self.assertIs(api, patched.return_value) - patched.assert_called_once_with(self.PROJECT, credentials, http, client_info) + patched.assert_called_once_with( + self.PROJECT, credentials, http, client_info, 
client_options + ) def test_report_errors_api_w_grpc(self): credentials = _make_credentials() diff --git a/firestore/CHANGELOG.md b/firestore/CHANGELOG.md index 918252cd8628..185d6fe37c6d 100644 --- a/firestore/CHANGELOG.md +++ b/firestore/CHANGELOG.md @@ -5,6 +5,39 @@ [1]: https://pypi.org/project/google-cloud-firestore/#history +## 1.6.0 + +11-06-2019 13:49 PST + +### New Features +- Surface new 'IN' and 'ARRAY_CONTAINS_ANY' query operators. ([#9541](https://github.com/googleapis/google-cloud-python/pull/9541)) + +## 1.5.0 + +10-15-2019 06:45 PDT + + +### Implementation Changes +- Expand dotted keys in mappings used as cursors. ([#8568](https://github.com/googleapis/google-cloud-python/pull/8568)) +- Tweak GAPIC client configuration (via synth). ([#9173](https://github.com/googleapis/google-cloud-python/pull/9173)) + +### New Features +- Add `IN`, `ARRAY_CONTAINS_ANY` operators; update docstrings (via synth). ([#9439](https://github.com/googleapis/google-cloud-python/pull/9439)) +- Add `COLLECTION_GROUP` to `Index.QueryScope` enum; update docstrings (via synth). ([#9253](https://github.com/googleapis/google-cloud-python/pull/9253)) +- Add `client_options` to v1 client. ([#9048](https://github.com/googleapis/google-cloud-python/pull/9048)) + +### Dependencies +- Pin 'google-cloud-core >= 1.0.3, < 2.0.0dev'. ([#9445](https://github.com/googleapis/google-cloud-python/pull/9445)) + +### Documentation +- Update README example to use non-deprecated `query.get`. ([#9235](https://github.com/googleapis/google-cloud-python/pull/9235)) +- Remove duplicated word in README. ([#9297](https://github.com/googleapis/google-cloud-python/pull/9297)) +- Fix intersphinx reference to `requests`. ([#9294](https://github.com/googleapis/google-cloud-python/pull/9294)) +- Remove CI for gh-pages, use googleapis.dev for `api_core refs`. ([#9085](https://github.com/googleapis/google-cloud-python/pull/9085)) +- Add license file. 
([#9109](https://github.com/googleapis/google-cloud-python/pull/9109)) +- Fix reference to library name ([#9047](https://github.com/googleapis/google-cloud-python/pull/9047)) +- Remove compatability badges from READMEs. ([#9035](https://github.com/googleapis/google-cloud-python/pull/9035)) + ## 1.4.0 08-06-2019 11:43 PDT diff --git a/firestore/README.rst b/firestore/README.rst index de8e38b91048..bb109a0efcd7 100644 --- a/firestore/README.rst +++ b/firestore/README.rst @@ -102,15 +102,14 @@ Example Usage # Then query for documents users_ref = db.collection(u'users') - docs = users_ref.get() - for doc in docs: + for doc in users_ref.stream(): print(u'{} => {}'.format(doc.id, doc.to_dict())) Next Steps ~~~~~~~~~~ - Read the `Client Library Documentation`_ for Google Cloud Firestore API - API to see other available methods on the client. + to see other available methods on the client. - Read the `Product Documentation`_ to learn more about the product and see How-to Guides. diff --git a/firestore/docs/conf.py b/firestore/docs/conf.py index 15df1fa1182f..3f6a5971ac4a 100644 --- a/firestore/docs/conf.py +++ b/firestore/docs/conf.py @@ -338,7 +338,7 @@ "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://2.python-requests.org/en/master/", None), + "requests": ("https://requests.kennethreitz.org/en/stable/", None), "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } diff --git a/firestore/docs/index.rst b/firestore/docs/index.rst index d355d8aec4c8..b8157df9bd0c 100644 --- a/firestore/docs/index.rst +++ b/firestore/docs/index.rst @@ -1,5 +1,12 @@ .. include:: README.rst +.. note:: + + Because the firestore client uses :mod:`grpcio` library, it is safe to + share instances across threads. 
In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.Pool` or + :class:`multiprocessing.Process`. API Reference ------------- diff --git a/firestore/google/cloud/firestore_admin_v1/gapic/enums.py b/firestore/google/cloud/firestore_admin_v1/gapic/enums.py index c8b31fc6255b..41247024895a 100644 --- a/firestore/google/cloud/firestore_admin_v1/gapic/enums.py +++ b/firestore/google/cloud/firestore_admin_v1/gapic/enums.py @@ -74,10 +74,14 @@ class QueryScope(enum.IntEnum): COLLECTION (int): Indexes with a collection query scope specified allow queries against a collection that is the child of a specific document, specified at query time, and that has the collection id specified by the index. + COLLECTION_GROUP (int): Indexes with a collection group query scope specified allow queries + against all collections that has the collection id specified by the + index. """ QUERY_SCOPE_UNSPECIFIED = 0 COLLECTION = 1 + COLLECTION_GROUP = 2 class State(enum.IntEnum): """ diff --git a/firestore/google/cloud/firestore_admin_v1/proto/field.proto b/firestore/google/cloud/firestore_admin_v1/proto/field.proto index a24e1aaf1dc4..14891596d139 100644 --- a/firestore/google/cloud/firestore_admin_v1/proto/field.proto +++ b/firestore/google/cloud/firestore_admin_v1/proto/field.proto @@ -28,7 +28,6 @@ option java_package = "com.google.firestore.admin.v1"; option objc_class_prefix = "GCFS"; option php_namespace = "Google\\Cloud\\Firestore\\Admin\\V1"; - // Represents a single field in the database. // // Fields are grouped by their "Collection Group", which represent all @@ -39,14 +38,12 @@ message Field { // The indexes supported for this field. repeated Index indexes = 1; - // Output only. - // When true, the `Field`'s index configuration is set from the + // Output only. When true, the `Field`'s index configuration is set from the // configuration specified by the `ancestor_field`. 
// When false, the `Field`'s index configuration is defined explicitly. bool uses_ancestor_config = 2; - // Output only. - // Specifies the resource name of the `Field` from which this field's + // Output only. Specifies the resource name of the `Field` from which this field's // index configuration is set (when `uses_ancestor_config` is true), // or from which it *would* be set if this field had no index configuration // (when `uses_ancestor_config` is false). diff --git a/firestore/google/cloud/firestore_admin_v1/proto/firestore_admin.proto b/firestore/google/cloud/firestore_admin_v1/proto/firestore_admin.proto index e753686b200e..234827bef228 100644 --- a/firestore/google/cloud/firestore_admin_v1/proto/firestore_admin.proto +++ b/firestore/google/cloud/firestore_admin_v1/proto/firestore_admin.proto @@ -23,6 +23,7 @@ import "google/firestore/admin/v1/index.proto"; import "google/longrunning/operations.proto"; import "google/protobuf/empty.proto"; import "google/protobuf/field_mask.proto"; +import "google/api/client.proto"; option csharp_namespace = "Google.Cloud.Firestore.Admin.V1"; option go_package = "google.golang.org/genproto/googleapis/firestore/admin/v1;admin"; @@ -32,10 +33,14 @@ option java_package = "com.google.firestore.admin.v1"; option objc_class_prefix = "GCFS"; option php_namespace = "Google\\Cloud\\Firestore\\Admin\\V1"; - // Operations are created by service `FirestoreAdmin`, but are accessed via // service `google.longrunning.Operations`. service FirestoreAdmin { + option (google.api.default_host) = "firestore.googleapis.com"; + option (google.api.oauth_scopes) = + "https://www.googleapis.com/auth/cloud-platform," + "https://www.googleapis.com/auth/datastore"; + // Creates a composite index. This returns a [google.longrunning.Operation][google.longrunning.Operation] // which may be used to track the status of the creation. 
The metadata for // the operation will be the type [IndexOperationMetadata][google.firestore.admin.v1.IndexOperationMetadata]. diff --git a/firestore/google/cloud/firestore_admin_v1/proto/firestore_admin_pb2.py b/firestore/google/cloud/firestore_admin_v1/proto/firestore_admin_pb2.py index 7346c4b4e789..bc43cbcf366b 100644 --- a/firestore/google/cloud/firestore_admin_v1/proto/firestore_admin_pb2.py +++ b/firestore/google/cloud/firestore_admin_v1/proto/firestore_admin_pb2.py @@ -27,6 +27,7 @@ ) from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 DESCRIPTOR = _descriptor.FileDescriptor( @@ -37,7 +38,7 @@ "\n\035com.google.firestore.admin.v1B\023FirestoreAdminProtoP\001Z>google.golang.org/genproto/googleapis/firestore/admin/v1;admin\242\002\004GCFS\252\002\037Google.Cloud.Firestore.Admin.V1\312\002\037Google\\Cloud\\Firestore\\Admin\\V1" ), serialized_pb=_b( - '\n;google/cloud/firestore/admin_v1/proto/firestore_admin.proto\x12\x19google.firestore.admin.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x31google/cloud/firestore/admin_v1/proto/field.proto\x1a\x31google/cloud/firestore/admin_v1/proto/index.proto\x1a#google/longrunning/operations.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto"U\n\x12\x43reateIndexRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12/\n\x05index\x18\x02 \x01(\x0b\x32 .google.firestore.admin.v1.Index"[\n\x12ListIndexesRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t"a\n\x13ListIndexesResponse\x12\x31\n\x07indexes\x18\x01 \x03(\x0b\x32 .google.firestore.admin.v1.Index\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\x1f\n\x0fGetIndexRequest\x12\x0c\n\x04name\x18\x01 \x01(\t""\n\x12\x44\x65leteIndexRequest\x12\x0c\n\x04name\x18\x01 
\x01(\t"v\n\x12UpdateFieldRequest\x12/\n\x05\x66ield\x18\x01 \x01(\x0b\x32 .google.firestore.admin.v1.Field\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"\x1f\n\x0fGetFieldRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"Z\n\x11ListFieldsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t"_\n\x12ListFieldsResponse\x12\x30\n\x06\x66ields\x18\x01 \x03(\x0b\x32 .google.firestore.admin.v1.Field\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"Y\n\x16\x45xportDocumentsRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x16\n\x0e\x63ollection_ids\x18\x02 \x03(\t\x12\x19\n\x11output_uri_prefix\x18\x03 \x01(\t"X\n\x16ImportDocumentsRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x16\n\x0e\x63ollection_ids\x18\x02 \x03(\t\x12\x18\n\x10input_uri_prefix\x18\x03 \x01(\t2\x85\x0c\n\x0e\x46irestoreAdmin\x12\xaa\x01\n\x0b\x43reateIndex\x12-.google.firestore.admin.v1.CreateIndexRequest\x1a\x1d.google.longrunning.Operation"M\x82\xd3\xe4\x93\x02G">/v1/{parent=projects/*/databases/*/collectionGroups/*}/indexes:\x05index\x12\xb4\x01\n\x0bListIndexes\x12-.google.firestore.admin.v1.ListIndexesRequest\x1a..google.firestore.admin.v1.ListIndexesResponse"F\x82\xd3\xe4\x93\x02@\x12>/v1/{parent=projects/*/databases/*/collectionGroups/*}/indexes\x12\xa0\x01\n\x08GetIndex\x12*.google.firestore.admin.v1.GetIndexRequest\x1a .google.firestore.admin.v1.Index"F\x82\xd3\xe4\x93\x02@\x12>/v1/{name=projects/*/databases/*/collectionGroups/*/indexes/*}\x12\x9c\x01\n\x0b\x44\x65leteIndex\x12-.google.firestore.admin.v1.DeleteIndexRequest\x1a\x16.google.protobuf.Empty"F\x82\xd3\xe4\x93\x02@*>/v1/{name=projects/*/databases/*/collectionGroups/*/indexes/*}\x12\x9f\x01\n\x08GetField\x12*.google.firestore.admin.v1.GetFieldRequest\x1a 
.google.firestore.admin.v1.Field"E\x82\xd3\xe4\x93\x02?\x12=/v1/{name=projects/*/databases/*/collectionGroups/*/fields/*}\x12\xaf\x01\n\x0bUpdateField\x12-.google.firestore.admin.v1.UpdateFieldRequest\x1a\x1d.google.longrunning.Operation"R\x82\xd3\xe4\x93\x02L2C/v1/{field.name=projects/*/databases/*/collectionGroups/*/fields/*}:\x05\x66ield\x12\xb0\x01\n\nListFields\x12,.google.firestore.admin.v1.ListFieldsRequest\x1a-.google.firestore.admin.v1.ListFieldsResponse"E\x82\xd3\xe4\x93\x02?\x12=/v1/{parent=projects/*/databases/*/collectionGroups/*}/fields\x12\xa1\x01\n\x0f\x45xportDocuments\x12\x31.google.firestore.admin.v1.ExportDocumentsRequest\x1a\x1d.google.longrunning.Operation"<\x82\xd3\xe4\x93\x02\x36"1/v1/{name=projects/*/databases/*}:exportDocuments:\x01*\x12\xa1\x01\n\x0fImportDocuments\x12\x31.google.firestore.admin.v1.ImportDocumentsRequest\x1a\x1d.google.longrunning.Operation"<\x82\xd3\xe4\x93\x02\x36"1/v1/{name=projects/*/databases/*}:importDocuments:\x01*B\xc1\x01\n\x1d\x63om.google.firestore.admin.v1B\x13\x46irestoreAdminProtoP\x01Z>google.golang.org/genproto/googleapis/firestore/admin/v1;admin\xa2\x02\x04GCFS\xaa\x02\x1fGoogle.Cloud.Firestore.Admin.V1\xca\x02\x1fGoogle\\Cloud\\Firestore\\Admin\\V1b\x06proto3' + '\n;google/cloud/firestore/admin_v1/proto/firestore_admin.proto\x12\x19google.firestore.admin.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x31google/cloud/firestore/admin_v1/proto/field.proto\x1a\x31google/cloud/firestore/admin_v1/proto/index.proto\x1a#google/longrunning/operations.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x17google/api/client.proto"U\n\x12\x43reateIndexRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12/\n\x05index\x18\x02 \x01(\x0b\x32 .google.firestore.admin.v1.Index"[\n\x12ListIndexesRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 
\x01(\t"a\n\x13ListIndexesResponse\x12\x31\n\x07indexes\x18\x01 \x03(\x0b\x32 .google.firestore.admin.v1.Index\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\x1f\n\x0fGetIndexRequest\x12\x0c\n\x04name\x18\x01 \x01(\t""\n\x12\x44\x65leteIndexRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"v\n\x12UpdateFieldRequest\x12/\n\x05\x66ield\x18\x01 \x01(\x0b\x32 .google.firestore.admin.v1.Field\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"\x1f\n\x0fGetFieldRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"Z\n\x11ListFieldsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t"_\n\x12ListFieldsResponse\x12\x30\n\x06\x66ields\x18\x01 \x03(\x0b\x32 .google.firestore.admin.v1.Field\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"Y\n\x16\x45xportDocumentsRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x16\n\x0e\x63ollection_ids\x18\x02 \x03(\t\x12\x19\n\x11output_uri_prefix\x18\x03 \x01(\t"X\n\x16ImportDocumentsRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x16\n\x0e\x63ollection_ids\x18\x02 \x03(\t\x12\x18\n\x10input_uri_prefix\x18\x03 \x01(\t2\xfd\x0c\n\x0e\x46irestoreAdmin\x12\xaa\x01\n\x0b\x43reateIndex\x12-.google.firestore.admin.v1.CreateIndexRequest\x1a\x1d.google.longrunning.Operation"M\x82\xd3\xe4\x93\x02G">/v1/{parent=projects/*/databases/*/collectionGroups/*}/indexes:\x05index\x12\xb4\x01\n\x0bListIndexes\x12-.google.firestore.admin.v1.ListIndexesRequest\x1a..google.firestore.admin.v1.ListIndexesResponse"F\x82\xd3\xe4\x93\x02@\x12>/v1/{parent=projects/*/databases/*/collectionGroups/*}/indexes\x12\xa0\x01\n\x08GetIndex\x12*.google.firestore.admin.v1.GetIndexRequest\x1a 
.google.firestore.admin.v1.Index"F\x82\xd3\xe4\x93\x02@\x12>/v1/{name=projects/*/databases/*/collectionGroups/*/indexes/*}\x12\x9c\x01\n\x0b\x44\x65leteIndex\x12-.google.firestore.admin.v1.DeleteIndexRequest\x1a\x16.google.protobuf.Empty"F\x82\xd3\xe4\x93\x02@*>/v1/{name=projects/*/databases/*/collectionGroups/*/indexes/*}\x12\x9f\x01\n\x08GetField\x12*.google.firestore.admin.v1.GetFieldRequest\x1a .google.firestore.admin.v1.Field"E\x82\xd3\xe4\x93\x02?\x12=/v1/{name=projects/*/databases/*/collectionGroups/*/fields/*}\x12\xaf\x01\n\x0bUpdateField\x12-.google.firestore.admin.v1.UpdateFieldRequest\x1a\x1d.google.longrunning.Operation"R\x82\xd3\xe4\x93\x02L2C/v1/{field.name=projects/*/databases/*/collectionGroups/*/fields/*}:\x05\x66ield\x12\xb0\x01\n\nListFields\x12,.google.firestore.admin.v1.ListFieldsRequest\x1a-.google.firestore.admin.v1.ListFieldsResponse"E\x82\xd3\xe4\x93\x02?\x12=/v1/{parent=projects/*/databases/*/collectionGroups/*}/fields\x12\xa1\x01\n\x0f\x45xportDocuments\x12\x31.google.firestore.admin.v1.ExportDocumentsRequest\x1a\x1d.google.longrunning.Operation"<\x82\xd3\xe4\x93\x02\x36"1/v1/{name=projects/*/databases/*}:exportDocuments:\x01*\x12\xa1\x01\n\x0fImportDocuments\x12\x31.google.firestore.admin.v1.ImportDocumentsRequest\x1a\x1d.google.longrunning.Operation"<\x82\xd3\xe4\x93\x02\x36"1/v1/{name=projects/*/databases/*}:importDocuments:\x01*\x1av\xca\x41\x18\x66irestore.googleapis.com\xd2\x41Xhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/datastoreB\xc1\x01\n\x1d\x63om.google.firestore.admin.v1B\x13\x46irestoreAdminProtoP\x01Z>google.golang.org/genproto/googleapis/firestore/admin/v1;admin\xa2\x02\x04GCFS\xaa\x02\x1fGoogle.Cloud.Firestore.Admin.V1\xca\x02\x1fGoogle\\Cloud\\Firestore\\Admin\\V1b\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, @@ -46,6 +47,7 @@ google_dot_longrunning_dot_operations__pb2.DESCRIPTOR, google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, 
google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, ], ) @@ -102,8 +104,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=322, - serialized_end=407, + serialized_start=347, + serialized_end=432, ) @@ -195,8 +197,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=409, - serialized_end=500, + serialized_start=434, + serialized_end=525, ) @@ -252,8 +254,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=502, - serialized_end=599, + serialized_start=527, + serialized_end=624, ) @@ -291,8 +293,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=601, - serialized_end=632, + serialized_start=626, + serialized_end=657, ) @@ -330,8 +332,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=634, - serialized_end=668, + serialized_start=659, + serialized_end=693, ) @@ -387,8 +389,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=670, - serialized_end=788, + serialized_start=695, + serialized_end=813, ) @@ -426,8 +428,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=790, - serialized_end=821, + serialized_start=815, + serialized_end=846, ) @@ -519,8 +521,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=823, - serialized_end=913, + serialized_start=848, + serialized_end=938, ) @@ -576,8 +578,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=915, - serialized_end=1010, + serialized_start=940, + serialized_end=1035, ) @@ -651,8 +653,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1012, - serialized_end=1101, + serialized_start=1037, + serialized_end=1126, ) @@ -726,8 +728,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1103, - serialized_end=1191, + serialized_start=1128, + serialized_end=1216, ) _CREATEINDEXREQUEST.fields_by_name[ @@ -1047,9 +1049,11 @@ 
full_name="google.firestore.admin.v1.FirestoreAdmin", file=DESCRIPTOR, index=0, - serialized_options=None, - serialized_start=1194, - serialized_end=2735, + serialized_options=_b( + "\312A\030firestore.googleapis.com\322AXhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/datastore" + ), + serialized_start=1219, + serialized_end=2880, methods=[ _descriptor.MethodDescriptor( name="CreateIndex", diff --git a/firestore/google/cloud/firestore_admin_v1/proto/index.proto b/firestore/google/cloud/firestore_admin_v1/proto/index.proto index 94941d3a0707..f2038c581c47 100644 --- a/firestore/google/cloud/firestore_admin_v1/proto/index.proto +++ b/firestore/google/cloud/firestore_admin_v1/proto/index.proto @@ -27,7 +27,6 @@ option java_package = "com.google.firestore.admin.v1"; option objc_class_prefix = "GCFS"; option php_namespace = "Google\\Cloud\\Firestore\\Admin\\V1"; - // Cloud Firestore indexes enable simple and complex queries against // documents in a database. message Index { @@ -82,6 +81,11 @@ message Index { // against a collection that is the child of a specific document, specified // at query time, and that has the collection id specified by the index. COLLECTION = 1; + + // Indexes with a collection group query scope specified allow queries + // against all collections that has the collection id specified by the + // index. + COLLECTION_GROUP = 2; } // The state of an index. During index creation, an index will be in the @@ -114,8 +118,7 @@ message Index { NEEDS_REPAIR = 3; } - // Output only. - // A server defined name for this index. + // Output only. A server defined name for this index. // The form of this name for composite indexes will be: // `projects/{project_id}/databases/{database_id}/collectionGroups/{collection_id}/indexes/{composite_index_id}` // For single field indexes, this field will be empty. @@ -143,7 +146,6 @@ message Index { // field path equal to the field path of the associated field. 
repeated IndexField fields = 3; - // Output only. - // The serving state of the index. + // Output only. The serving state of the index. State state = 4; } diff --git a/firestore/google/cloud/firestore_admin_v1/proto/index_pb2.py b/firestore/google/cloud/firestore_admin_v1/proto/index_pb2.py index cb089c09a31b..2752412494f3 100644 --- a/firestore/google/cloud/firestore_admin_v1/proto/index_pb2.py +++ b/firestore/google/cloud/firestore_admin_v1/proto/index_pb2.py @@ -26,7 +26,7 @@ "\n\035com.google.firestore.admin.v1B\nIndexProtoP\001Z>google.golang.org/genproto/googleapis/firestore/admin/v1;admin\242\002\004GCFS\252\002\037Google.Cloud.Firestore.Admin.V1\312\002\037Google\\Cloud\\Firestore\\Admin\\V1" ), serialized_pb=_b( - '\n1google/cloud/firestore/admin_v1/proto/index.proto\x12\x19google.firestore.admin.v1\x1a\x1cgoogle/api/annotations.proto"\x91\x05\n\x05Index\x12\x0c\n\x04name\x18\x01 \x01(\t\x12@\n\x0bquery_scope\x18\x02 \x01(\x0e\x32+.google.firestore.admin.v1.Index.QueryScope\x12;\n\x06\x66ields\x18\x03 \x03(\x0b\x32+.google.firestore.admin.v1.Index.IndexField\x12\x35\n\x05state\x18\x04 \x01(\x0e\x32&.google.firestore.admin.v1.Index.State\x1a\xbd\x02\n\nIndexField\x12\x12\n\nfield_path\x18\x01 \x01(\t\x12\x42\n\x05order\x18\x02 \x01(\x0e\x32\x31.google.firestore.admin.v1.Index.IndexField.OrderH\x00\x12O\n\x0c\x61rray_config\x18\x03 
\x01(\x0e\x32\x37.google.firestore.admin.v1.Index.IndexField.ArrayConfigH\x00"=\n\x05Order\x12\x15\n\x11ORDER_UNSPECIFIED\x10\x00\x12\r\n\tASCENDING\x10\x01\x12\x0e\n\nDESCENDING\x10\x02"9\n\x0b\x41rrayConfig\x12\x1c\n\x18\x41RRAY_CONFIG_UNSPECIFIED\x10\x00\x12\x0c\n\x08\x43ONTAINS\x10\x01\x42\x0c\n\nvalue_mode"9\n\nQueryScope\x12\x1b\n\x17QUERY_SCOPE_UNSPECIFIED\x10\x00\x12\x0e\n\nCOLLECTION\x10\x01"I\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0c\n\x08\x43REATING\x10\x01\x12\t\n\x05READY\x10\x02\x12\x10\n\x0cNEEDS_REPAIR\x10\x03\x42\xb8\x01\n\x1d\x63om.google.firestore.admin.v1B\nIndexProtoP\x01Z>google.golang.org/genproto/googleapis/firestore/admin/v1;admin\xa2\x02\x04GCFS\xaa\x02\x1fGoogle.Cloud.Firestore.Admin.V1\xca\x02\x1fGoogle\\Cloud\\Firestore\\Admin\\V1b\x06proto3' + '\n1google/cloud/firestore/admin_v1/proto/index.proto\x12\x19google.firestore.admin.v1\x1a\x1cgoogle/api/annotations.proto"\xa7\x05\n\x05Index\x12\x0c\n\x04name\x18\x01 \x01(\t\x12@\n\x0bquery_scope\x18\x02 \x01(\x0e\x32+.google.firestore.admin.v1.Index.QueryScope\x12;\n\x06\x66ields\x18\x03 \x03(\x0b\x32+.google.firestore.admin.v1.Index.IndexField\x12\x35\n\x05state\x18\x04 \x01(\x0e\x32&.google.firestore.admin.v1.Index.State\x1a\xbd\x02\n\nIndexField\x12\x12\n\nfield_path\x18\x01 \x01(\t\x12\x42\n\x05order\x18\x02 \x01(\x0e\x32\x31.google.firestore.admin.v1.Index.IndexField.OrderH\x00\x12O\n\x0c\x61rray_config\x18\x03 
\x01(\x0e\x32\x37.google.firestore.admin.v1.Index.IndexField.ArrayConfigH\x00"=\n\x05Order\x12\x15\n\x11ORDER_UNSPECIFIED\x10\x00\x12\r\n\tASCENDING\x10\x01\x12\x0e\n\nDESCENDING\x10\x02"9\n\x0b\x41rrayConfig\x12\x1c\n\x18\x41RRAY_CONFIG_UNSPECIFIED\x10\x00\x12\x0c\n\x08\x43ONTAINS\x10\x01\x42\x0c\n\nvalue_mode"O\n\nQueryScope\x12\x1b\n\x17QUERY_SCOPE_UNSPECIFIED\x10\x00\x12\x0e\n\nCOLLECTION\x10\x01\x12\x14\n\x10\x43OLLECTION_GROUP\x10\x02"I\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0c\n\x08\x43REATING\x10\x01\x12\t\n\x05READY\x10\x02\x12\x10\n\x0cNEEDS_REPAIR\x10\x03\x42\xb8\x01\n\x1d\x63om.google.firestore.admin.v1B\nIndexProtoP\x01Z>google.golang.org/genproto/googleapis/firestore/admin/v1;admin\xa2\x02\x04GCFS\xaa\x02\x1fGoogle.Cloud.Firestore.Admin.V1\xca\x02\x1fGoogle\\Cloud\\Firestore\\Admin\\V1b\x06proto3' ), dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR], ) @@ -99,11 +99,18 @@ _descriptor.EnumValueDescriptor( name="COLLECTION", index=1, number=1, serialized_options=None, type=None ), + _descriptor.EnumValueDescriptor( + name="COLLECTION_GROUP", + index=2, + number=2, + serialized_options=None, + type=None, + ), ], containing_type=None, serialized_options=None, serialized_start=636, - serialized_end=693, + serialized_end=715, ) _sym_db.RegisterEnumDescriptor(_INDEX_QUERYSCOPE) @@ -132,8 +139,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=695, - serialized_end=768, + serialized_start=717, + serialized_end=790, ) _sym_db.RegisterEnumDescriptor(_INDEX_STATE) @@ -309,7 +316,7 @@ extension_ranges=[], oneofs=[], serialized_start=111, - serialized_end=768, + serialized_end=790, ) _INDEX_INDEXFIELD.fields_by_name["order"].enum_type = _INDEX_INDEXFIELD_ORDER diff --git a/firestore/google/cloud/firestore_admin_v1/proto/location.proto b/firestore/google/cloud/firestore_admin_v1/proto/location.proto index 7b56051a5c36..d9dc6f9b9820 100644 --- a/firestore/google/cloud/firestore_admin_v1/proto/location.proto +++ 
b/firestore/google/cloud/firestore_admin_v1/proto/location.proto @@ -28,7 +28,6 @@ option java_package = "com.google.firestore.admin.v1"; option objc_class_prefix = "GCFS"; option php_namespace = "Google\\Cloud\\Firestore\\Admin\\V1"; - // The metadata message for [google.cloud.location.Location.metadata][google.cloud.location.Location.metadata]. message LocationMetadata { diff --git a/firestore/google/cloud/firestore_admin_v1/proto/operation.proto b/firestore/google/cloud/firestore_admin_v1/proto/operation.proto index d333d9b7de25..6494ab7cba99 100644 --- a/firestore/google/cloud/firestore_admin_v1/proto/operation.proto +++ b/firestore/google/cloud/firestore_admin_v1/proto/operation.proto @@ -29,7 +29,6 @@ option java_package = "com.google.firestore.admin.v1"; option objc_class_prefix = "GCFS"; option php_namespace = "Google\\Cloud\\Firestore\\Admin\\V1"; - // Metadata for [google.longrunning.Operation][google.longrunning.Operation] results from // [FirestoreAdmin.CreateIndex][google.firestore.admin.v1.FirestoreAdmin.CreateIndex]. message IndexOperationMetadata { diff --git a/firestore/google/cloud/firestore_v1/collection.py b/firestore/google/cloud/firestore_v1/collection.py index 165e0dfb3afa..27c3eeaa3155 100644 --- a/firestore/google/cloud/firestore_v1/collection.py +++ b/firestore/google/cloud/firestore_v1/collection.py @@ -20,7 +20,6 @@ from google.cloud.firestore_v1 import _helpers from google.cloud.firestore_v1 import query as query_mod -from google.cloud.firestore_v1.proto import document_pb2 from google.cloud.firestore_v1.watch import Watch from google.cloud.firestore_v1 import document @@ -157,27 +156,11 @@ def add(self, document_data, document_id=None): and the document already exists. 
""" if document_id is None: - parent_path, expected_prefix = self._parent_info() - - document_pb = document_pb2.Document() - - created_document_pb = self._client._firestore_api.create_document( - parent_path, - collection_id=self.id, - document_id=None, - document=document_pb, - mask=None, - metadata=self._client._rpc_metadata, - ) + document_id = _auto_id() - new_document_id = _helpers.get_doc_id(created_document_pb, expected_prefix) - document_ref = self.document(new_document_id) - set_result = document_ref.set(document_data) - return set_result.update_time, document_ref - else: - document_ref = self.document(document_id) - write_result = document_ref.create(document_data) - return write_result.update_time, document_ref + document_ref = self.document(document_id) + write_result = document_ref.create(document_data) + return write_result.update_time, document_ref def list_documents(self, page_size=None): """List all subdocuments of the current collection. @@ -446,7 +429,7 @@ def on_snapshot(self, callback): db = firestore_v1.Client() collection_ref = db.collection(u'users') - def on_snapshot(collection_snapshot): + def on_snapshot(collection_snapshot, changes, read_time): for doc in collection_snapshot.documents: print(u'{} => {}'.format(doc.id, doc.to_dict())) diff --git a/firestore/google/cloud/firestore_v1/document.py b/firestore/google/cloud/firestore_v1/document.py index df2861c1579d..571315e87563 100644 --- a/firestore/google/cloud/firestore_v1/document.py +++ b/firestore/google/cloud/firestore_v1/document.py @@ -512,7 +512,7 @@ def on_snapshot(self, callback): db = firestore_v1.Client() collection_ref = db.collection(u'users') - def on_snapshot(document_snapshot): + def on_snapshot(document_snapshot, changes, read_time): doc = document_snapshot print(u'{} => {}'.format(doc.id, doc.to_dict())) diff --git a/firestore/google/cloud/firestore_v1/gapic/enums.py b/firestore/google/cloud/firestore_v1/gapic/enums.py index 833761db83ba..857e350e454d 100644 --- 
a/firestore/google/cloud/firestore_v1/gapic/enums.py +++ b/firestore/google/cloud/firestore_v1/gapic/enums.py @@ -91,6 +91,10 @@ class Operator(enum.IntEnum): ``order_by``. EQUAL (int): Equal. ARRAY_CONTAINS (int): Contains. Requires that the field is an array. + IN (int): In. Requires that ``value`` is a non-empty ArrayValue with at most 10 + values. + ARRAY_CONTAINS_ANY (int): Contains any. Requires that the field is an array and ``value`` is a + non-empty ArrayValue with at most 10 values. """ OPERATOR_UNSPECIFIED = 0 @@ -100,6 +104,8 @@ class Operator(enum.IntEnum): GREATER_THAN_OR_EQUAL = 4 EQUAL = 5 ARRAY_CONTAINS = 7 + IN = 8 + ARRAY_CONTAINS_ANY = 9 class UnaryFilter(object): class Operator(enum.IntEnum): @@ -109,7 +115,7 @@ class Operator(enum.IntEnum): Attributes: OPERATOR_UNSPECIFIED (int): Unspecified. This value must not be used. IS_NAN (int): Test if a field is equal to NaN. - IS_NULL (int): Test if an exprestion evaluates to Null. + IS_NULL (int): Test if an expression evaluates to Null. """ OPERATOR_UNSPECIFIED = 0 diff --git a/firestore/google/cloud/firestore_v1/proto/common.proto b/firestore/google/cloud/firestore_v1/proto/common.proto index 59c62997ad0d..8e2ef27ff28a 100644 --- a/firestore/google/cloud/firestore_v1/proto/common.proto +++ b/firestore/google/cloud/firestore_v1/proto/common.proto @@ -28,7 +28,6 @@ option java_package = "com.google.firestore.v1"; option objc_class_prefix = "GCFS"; option php_namespace = "Google\\Cloud\\Firestore\\V1"; - // A set of field paths on a document. // Used to restrict a get or update operation on a document to a subset of its // fields. 
diff --git a/firestore/google/cloud/firestore_v1/proto/document.proto b/firestore/google/cloud/firestore_v1/proto/document.proto index d3d9c11c79cf..9110b4ff60da 100644 --- a/firestore/google/cloud/firestore_v1/proto/document.proto +++ b/firestore/google/cloud/firestore_v1/proto/document.proto @@ -30,7 +30,6 @@ option java_package = "com.google.firestore.v1"; option objc_class_prefix = "GCFS"; option php_namespace = "Google\\Cloud\\Firestore\\V1"; - // A Firestore document. // // Must not exceed 1 MiB - 4 bytes. diff --git a/firestore/google/cloud/firestore_v1/proto/firestore.proto b/firestore/google/cloud/firestore_v1/proto/firestore.proto index 5dc9667e2ca2..2fb25deb7b54 100644 --- a/firestore/google/cloud/firestore_v1/proto/firestore.proto +++ b/firestore/google/cloud/firestore_v1/proto/firestore.proto @@ -25,6 +25,7 @@ import "google/firestore/v1/write.proto"; import "google/protobuf/empty.proto"; import "google/protobuf/timestamp.proto"; import "google/rpc/status.proto"; +import "google/api/client.proto"; option csharp_namespace = "Google.Cloud.Firestore.V1"; option go_package = "google.golang.org/genproto/googleapis/firestore/v1;firestore"; @@ -33,6 +34,7 @@ option java_outer_classname = "FirestoreProto"; option java_package = "com.google.firestore.v1"; option objc_class_prefix = "GCFS"; option php_namespace = "Google\\Cloud\\Firestore\\V1"; + // Specification of the Firestore API. // The Cloud Firestore service. @@ -52,6 +54,11 @@ option php_namespace = "Google\\Cloud\\Firestore\\V1"; // committed. Any read with an equal or greater `read_time` is guaranteed // to see the effects of the transaction. service Firestore { + option (google.api.default_host) = "firestore.googleapis.com"; + option (google.api.oauth_scopes) = + "https://www.googleapis.com/auth/cloud-platform," + "https://www.googleapis.com/auth/datastore"; + // Gets a single document. 
rpc GetDocument(GetDocumentRequest) returns (Document) { option (google.api.http) = { @@ -654,14 +661,8 @@ message Target { google.protobuf.Timestamp read_time = 11; } - // A client provided target ID. - // - // If not set, the server will assign an ID for the target. - // - // Used for resuming a target without changing IDs. The IDs can either be - // client-assigned or be server-assigned in a previous stream. All targets - // with client provided IDs must be added before adding a target that needs - // a server-assigned id. + // The target ID that identifies the target on the stream. Must be a positive + // number and non-zero. int32 target_id = 5; // If the target should be removed once it is current and consistent. @@ -706,11 +707,7 @@ message TargetChange { // // If empty, the change applies to all targets. // - // For `target_change_type=ADD`, the order of the target IDs matches the order - // of the requests to add the targets. This allows clients to unambiguously - // associate server-assigned target IDs with added targets. - // - // For other states, the order of the target IDs is not defined. + // The order of the target IDs is not defined. repeated int32 target_ids = 2; // The error that resulted in this change, if applicable. 
diff --git a/firestore/google/cloud/firestore_v1/proto/firestore_pb2.py b/firestore/google/cloud/firestore_v1/proto/firestore_pb2.py index 5932d5023ace..799fb7a839b3 100644 --- a/firestore/google/cloud/firestore_v1/proto/firestore_pb2.py +++ b/firestore/google/cloud/firestore_v1/proto/firestore_pb2.py @@ -31,6 +31,7 @@ from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 DESCRIPTOR = _descriptor.FileDescriptor( @@ -41,7 +42,7 @@ "\n\027com.google.firestore.v1B\016FirestoreProtoP\001Z\n\x06labels\x18\x04 \x03(\x0b\x32..google.firestore.v1.ListenRequest.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x0f\n\rtarget_change"\xd5\x02\n\x0eListenResponse\x12:\n\rtarget_change\x18\x02 \x01(\x0b\x32!.google.firestore.v1.TargetChangeH\x00\x12>\n\x0f\x64ocument_change\x18\x03 \x01(\x0b\x32#.google.firestore.v1.DocumentChangeH\x00\x12>\n\x0f\x64ocument_delete\x18\x04 \x01(\x0b\x32#.google.firestore.v1.DocumentDeleteH\x00\x12>\n\x0f\x64ocument_remove\x18\x06 \x01(\x0b\x32#.google.firestore.v1.DocumentRemoveH\x00\x12\x36\n\x06\x66ilter\x18\x05 \x01(\x0b\x32$.google.firestore.v1.ExistenceFilterH\x00\x42\x0f\n\rresponse_type"\xa1\x03\n\x06Target\x12\x38\n\x05query\x18\x02 \x01(\x0b\x32\'.google.firestore.v1.Target.QueryTargetH\x00\x12@\n\tdocuments\x18\x03 \x01(\x0b\x32+.google.firestore.v1.Target.DocumentsTargetH\x00\x12\x16\n\x0cresume_token\x18\x04 \x01(\x0cH\x01\x12/\n\tread_time\x18\x0b \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x01\x12\x11\n\ttarget_id\x18\x05 \x01(\x05\x12\x0c\n\x04once\x18\x06 \x01(\x08\x1a$\n\x0f\x44ocumentsTarget\x12\x11\n\tdocuments\x18\x02 \x03(\t\x1am\n\x0bQueryTarget\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12@\n\x10structured_query\x18\x02 
\x01(\x0b\x32$.google.firestore.v1.StructuredQueryH\x00\x42\x0c\n\nquery_typeB\r\n\x0btarget_typeB\r\n\x0bresume_type"\xaa\x02\n\x0cTargetChange\x12N\n\x12target_change_type\x18\x01 \x01(\x0e\x32\x32.google.firestore.v1.TargetChange.TargetChangeType\x12\x12\n\ntarget_ids\x18\x02 \x03(\x05\x12!\n\x05\x63\x61use\x18\x03 \x01(\x0b\x32\x12.google.rpc.Status\x12\x14\n\x0cresume_token\x18\x04 \x01(\x0c\x12-\n\tread_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"N\n\x10TargetChangeType\x12\r\n\tNO_CHANGE\x10\x00\x12\x07\n\x03\x41\x44\x44\x10\x01\x12\n\n\x06REMOVE\x10\x02\x12\x0b\n\x07\x43URRENT\x10\x03\x12\t\n\x05RESET\x10\x04"Q\n\x18ListCollectionIdsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"L\n\x19ListCollectionIdsResponse\x12\x16\n\x0e\x63ollection_ids\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t2\x84\x12\n\tFirestore\x12\x8f\x01\n\x0bGetDocument\x12\'.google.firestore.v1.GetDocumentRequest\x1a\x1d.google.firestore.v1.Document"8\x82\xd3\xe4\x93\x02\x32\x12\x30/v1/{name=projects/*/databases/*/documents/*/**}\x12\xb2\x01\n\rListDocuments\x12).google.firestore.v1.ListDocumentsRequest\x1a*.google.firestore.v1.ListDocumentsResponse"J\x82\xd3\xe4\x93\x02\x44\x12\x42/v1/{parent=projects/*/databases/*/documents/*/**}/{collection_id}\x12\xaf\x01\n\x0e\x43reateDocument\x12*.google.firestore.v1.CreateDocumentRequest\x1a\x1d.google.firestore.v1.Document"R\x82\xd3\xe4\x93\x02L"@/v1/{parent=projects/*/databases/*/documents/**}/{collection_id}:\x08\x64ocument\x12\xa8\x01\n\x0eUpdateDocument\x12*.google.firestore.v1.UpdateDocumentRequest\x1a\x1d.google.firestore.v1.Document"K\x82\xd3\xe4\x93\x02\x45\x32\x39/v1/{document.name=projects/*/databases/*/documents/*/**}:\x08\x64ocument\x12\x8e\x01\n\x0e\x44\x65leteDocument\x12*.google.firestore.v1.DeleteDocumentRequest\x1a\x16.google.protobuf.Empty"8\x82\xd3\xe4\x93\x02\x32*0/v1/{name=projects/*/databases/*/documents/*/**}\x12\xb9\x01\n\x11
\x42\x61tchGetDocuments\x12-.google.firestore.v1.BatchGetDocumentsRequest\x1a..google.firestore.v1.BatchGetDocumentsResponse"C\x82\xd3\xe4\x93\x02="8/v1/{database=projects/*/databases/*}/documents:batchGet:\x01*0\x01\x12\xbc\x01\n\x10\x42\x65ginTransaction\x12,.google.firestore.v1.BeginTransactionRequest\x1a-.google.firestore.v1.BeginTransactionResponse"K\x82\xd3\xe4\x93\x02\x45"@/v1/{database=projects/*/databases/*}/documents:beginTransaction:\x01*\x12\x94\x01\n\x06\x43ommit\x12".google.firestore.v1.CommitRequest\x1a#.google.firestore.v1.CommitResponse"A\x82\xd3\xe4\x93\x02;"6/v1/{database=projects/*/databases/*}/documents:commit:\x01*\x12\x8d\x01\n\x08Rollback\x12$.google.firestore.v1.RollbackRequest\x1a\x16.google.protobuf.Empty"C\x82\xd3\xe4\x93\x02="8/v1/{database=projects/*/databases/*}/documents:rollback:\x01*\x12\xdf\x01\n\x08RunQuery\x12$.google.firestore.v1.RunQueryRequest\x1a%.google.firestore.v1.RunQueryResponse"\x83\x01\x82\xd3\xe4\x93\x02}"6/v1/{parent=projects/*/databases/*/documents}:runQuery:\x01*Z@";/v1/{parent=projects/*/databases/*/documents/*/**}:runQuery:\x01*0\x01\x12\x94\x01\n\x05Write\x12!.google.firestore.v1.WriteRequest\x1a".google.firestore.v1.WriteResponse"@\x82\xd3\xe4\x93\x02:"5/v1/{database=projects/*/databases/*}/documents:write:\x01*(\x01\x30\x01\x12\x98\x01\n\x06Listen\x12".google.firestore.v1.ListenRequest\x1a#.google.firestore.v1.ListenResponse"A\x82\xd3\xe4\x93\x02;"6/v1/{database=projects/*/databases/*}/documents:listen:\x01*(\x01\x30\x01\x12\x8b\x02\n\x11ListCollectionIds\x12-.google.firestore.v1.ListCollectionIdsRequest\x1a..google.firestore.v1.ListCollectionIdsResponse"\x96\x01\x82\xd3\xe4\x93\x02\x8f\x01"?/v1/{parent=projects/*/databases/*/documents}:listCollectionIds:\x01*ZI"D/v1/{parent=projects/*/databases/*/documents/*/**}:listCollectionIds:\x01*B\xa8\x01\n\x17\x63om.google.firestore.v1B\x0e\x46irestoreProtoP\x01Z\n\x06labels\x18\x04 
\x03(\x0b\x32..google.firestore.v1.ListenRequest.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x0f\n\rtarget_change"\xd5\x02\n\x0eListenResponse\x12:\n\rtarget_change\x18\x02 \x01(\x0b\x32!.google.firestore.v1.TargetChangeH\x00\x12>\n\x0f\x64ocument_change\x18\x03 \x01(\x0b\x32#.google.firestore.v1.DocumentChangeH\x00\x12>\n\x0f\x64ocument_delete\x18\x04 \x01(\x0b\x32#.google.firestore.v1.DocumentDeleteH\x00\x12>\n\x0f\x64ocument_remove\x18\x06 \x01(\x0b\x32#.google.firestore.v1.DocumentRemoveH\x00\x12\x36\n\x06\x66ilter\x18\x05 \x01(\x0b\x32$.google.firestore.v1.ExistenceFilterH\x00\x42\x0f\n\rresponse_type"\xa1\x03\n\x06Target\x12\x38\n\x05query\x18\x02 \x01(\x0b\x32\'.google.firestore.v1.Target.QueryTargetH\x00\x12@\n\tdocuments\x18\x03 \x01(\x0b\x32+.google.firestore.v1.Target.DocumentsTargetH\x00\x12\x16\n\x0cresume_token\x18\x04 \x01(\x0cH\x01\x12/\n\tread_time\x18\x0b \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x01\x12\x11\n\ttarget_id\x18\x05 \x01(\x05\x12\x0c\n\x04once\x18\x06 \x01(\x08\x1a$\n\x0f\x44ocumentsTarget\x12\x11\n\tdocuments\x18\x02 \x03(\t\x1am\n\x0bQueryTarget\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12@\n\x10structured_query\x18\x02 \x01(\x0b\x32$.google.firestore.v1.StructuredQueryH\x00\x42\x0c\n\nquery_typeB\r\n\x0btarget_typeB\r\n\x0bresume_type"\xaa\x02\n\x0cTargetChange\x12N\n\x12target_change_type\x18\x01 \x01(\x0e\x32\x32.google.firestore.v1.TargetChange.TargetChangeType\x12\x12\n\ntarget_ids\x18\x02 \x03(\x05\x12!\n\x05\x63\x61use\x18\x03 \x01(\x0b\x32\x12.google.rpc.Status\x12\x14\n\x0cresume_token\x18\x04 \x01(\x0c\x12-\n\tread_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"N\n\x10TargetChangeType\x12\r\n\tNO_CHANGE\x10\x00\x12\x07\n\x03\x41\x44\x44\x10\x01\x12\n\n\x06REMOVE\x10\x02\x12\x0b\n\x07\x43URRENT\x10\x03\x12\t\n\x05RESET\x10\x04"Q\n\x18ListCollectionIdsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 
\x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"L\n\x19ListCollectionIdsResponse\x12\x16\n\x0e\x63ollection_ids\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t2\xfc\x12\n\tFirestore\x12\x8f\x01\n\x0bGetDocument\x12\'.google.firestore.v1.GetDocumentRequest\x1a\x1d.google.firestore.v1.Document"8\x82\xd3\xe4\x93\x02\x32\x12\x30/v1/{name=projects/*/databases/*/documents/*/**}\x12\xb2\x01\n\rListDocuments\x12).google.firestore.v1.ListDocumentsRequest\x1a*.google.firestore.v1.ListDocumentsResponse"J\x82\xd3\xe4\x93\x02\x44\x12\x42/v1/{parent=projects/*/databases/*/documents/*/**}/{collection_id}\x12\xaf\x01\n\x0e\x43reateDocument\x12*.google.firestore.v1.CreateDocumentRequest\x1a\x1d.google.firestore.v1.Document"R\x82\xd3\xe4\x93\x02L"@/v1/{parent=projects/*/databases/*/documents/**}/{collection_id}:\x08\x64ocument\x12\xa8\x01\n\x0eUpdateDocument\x12*.google.firestore.v1.UpdateDocumentRequest\x1a\x1d.google.firestore.v1.Document"K\x82\xd3\xe4\x93\x02\x45\x32\x39/v1/{document.name=projects/*/databases/*/documents/*/**}:\x08\x64ocument\x12\x8e\x01\n\x0e\x44\x65leteDocument\x12*.google.firestore.v1.DeleteDocumentRequest\x1a\x16.google.protobuf.Empty"8\x82\xd3\xe4\x93\x02\x32*0/v1/{name=projects/*/databases/*/documents/*/**}\x12\xb9\x01\n\x11\x42\x61tchGetDocuments\x12-.google.firestore.v1.BatchGetDocumentsRequest\x1a..google.firestore.v1.BatchGetDocumentsResponse"C\x82\xd3\xe4\x93\x02="8/v1/{database=projects/*/databases/*}/documents:batchGet:\x01*0\x01\x12\xbc\x01\n\x10\x42\x65ginTransaction\x12,.google.firestore.v1.BeginTransactionRequest\x1a-.google.firestore.v1.BeginTransactionResponse"K\x82\xd3\xe4\x93\x02\x45"@/v1/{database=projects/*/databases/*}/documents:beginTransaction:\x01*\x12\x94\x01\n\x06\x43ommit\x12".google.firestore.v1.CommitRequest\x1a#.google.firestore.v1.CommitResponse"A\x82\xd3\xe4\x93\x02;"6/v1/{database=projects/*/databases/*}/documents:commit:\x01*\x12\x8d\x01\n\x08Rollback\x12$.google.firestore.v1.RollbackRequest\x1a\x16.google.protobuf
.Empty"C\x82\xd3\xe4\x93\x02="8/v1/{database=projects/*/databases/*}/documents:rollback:\x01*\x12\xdf\x01\n\x08RunQuery\x12$.google.firestore.v1.RunQueryRequest\x1a%.google.firestore.v1.RunQueryResponse"\x83\x01\x82\xd3\xe4\x93\x02}"6/v1/{parent=projects/*/databases/*/documents}:runQuery:\x01*Z@";/v1/{parent=projects/*/databases/*/documents/*/**}:runQuery:\x01*0\x01\x12\x94\x01\n\x05Write\x12!.google.firestore.v1.WriteRequest\x1a".google.firestore.v1.WriteResponse"@\x82\xd3\xe4\x93\x02:"5/v1/{database=projects/*/databases/*}/documents:write:\x01*(\x01\x30\x01\x12\x98\x01\n\x06Listen\x12".google.firestore.v1.ListenRequest\x1a#.google.firestore.v1.ListenResponse"A\x82\xd3\xe4\x93\x02;"6/v1/{database=projects/*/databases/*}/documents:listen:\x01*(\x01\x30\x01\x12\x8b\x02\n\x11ListCollectionIds\x12-.google.firestore.v1.ListCollectionIdsRequest\x1a..google.firestore.v1.ListCollectionIdsResponse"\x96\x01\x82\xd3\xe4\x93\x02\x8f\x01"?/v1/{parent=projects/*/databases/*/documents}:listCollectionIds:\x01*ZI"D/v1/{parent=projects/*/databases/*/documents/*/**}:listCollectionIds:\x01*\x1av\xca\x41\x18\x66irestore.googleapis.com\xd2\x41Xhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/datastoreB\xa8\x01\n\x17\x63om.google.firestore.v1B\x0e\x46irestoreProtoP\x01Z=": _operator_enum.GREATER_THAN_OR_EQUAL, ">": _operator_enum.GREATER_THAN, "array_contains": _operator_enum.ARRAY_CONTAINS, + "in": _operator_enum.IN, + "array_contains_any": _operator_enum.ARRAY_CONTAINS_ANY, } _BAD_OP_STRING = "Operator string {!r} is invalid. Valid choices are: {}." 
_BAD_OP_NAN_NULL = 'Only an equality filter ("==") can be used with None or NaN values' diff --git a/firestore/google/cloud/firestore_v1beta1/gapic/enums.py b/firestore/google/cloud/firestore_v1beta1/gapic/enums.py index 833761db83ba..857e350e454d 100644 --- a/firestore/google/cloud/firestore_v1beta1/gapic/enums.py +++ b/firestore/google/cloud/firestore_v1beta1/gapic/enums.py @@ -91,6 +91,10 @@ class Operator(enum.IntEnum): ``order_by``. EQUAL (int): Equal. ARRAY_CONTAINS (int): Contains. Requires that the field is an array. + IN (int): In. Requires that ``value`` is a non-empty ArrayValue with at most 10 + values. + ARRAY_CONTAINS_ANY (int): Contains any. Requires that the field is an array and ``value`` is a + non-empty ArrayValue with at most 10 values. """ OPERATOR_UNSPECIFIED = 0 @@ -100,6 +104,8 @@ class Operator(enum.IntEnum): GREATER_THAN_OR_EQUAL = 4 EQUAL = 5 ARRAY_CONTAINS = 7 + IN = 8 + ARRAY_CONTAINS_ANY = 9 class UnaryFilter(object): class Operator(enum.IntEnum): @@ -109,7 +115,7 @@ class Operator(enum.IntEnum): Attributes: OPERATOR_UNSPECIFIED (int): Unspecified. This value must not be used. IS_NAN (int): Test if a field is equal to NaN. - IS_NULL (int): Test if an exprestion evaluates to Null. + IS_NULL (int): Test if an expression evaluates to Null. """ OPERATOR_UNSPECIFIED = 0 diff --git a/firestore/google/cloud/firestore_v1beta1/proto/common.proto b/firestore/google/cloud/firestore_v1beta1/proto/common.proto index 87c88a3be630..2eaa183470d7 100644 --- a/firestore/google/cloud/firestore_v1beta1/proto/common.proto +++ b/firestore/google/cloud/firestore_v1beta1/proto/common.proto @@ -28,7 +28,6 @@ option java_package = "com.google.firestore.v1beta1"; option objc_class_prefix = "GCFS"; option php_namespace = "Google\\Cloud\\Firestore\\V1beta1"; - // A set of field paths on a document. // Used to restrict a get or update operation on a document to a subset of its // fields. 
diff --git a/firestore/google/cloud/firestore_v1beta1/proto/document.proto b/firestore/google/cloud/firestore_v1beta1/proto/document.proto index 8a043df59980..7caae4688a5f 100644 --- a/firestore/google/cloud/firestore_v1beta1/proto/document.proto +++ b/firestore/google/cloud/firestore_v1beta1/proto/document.proto @@ -30,7 +30,6 @@ option java_package = "com.google.firestore.v1beta1"; option objc_class_prefix = "GCFS"; option php_namespace = "Google\\Cloud\\Firestore\\V1beta1"; - // A Firestore document. // // Must not exceed 1 MiB - 4 bytes. diff --git a/firestore/google/cloud/firestore_v1beta1/proto/firestore.proto b/firestore/google/cloud/firestore_v1beta1/proto/firestore.proto index b7ba79075a40..ff0f03c709d6 100644 --- a/firestore/google/cloud/firestore_v1beta1/proto/firestore.proto +++ b/firestore/google/cloud/firestore_v1beta1/proto/firestore.proto @@ -25,6 +25,7 @@ import "google/firestore/v1beta1/write.proto"; import "google/protobuf/empty.proto"; import "google/protobuf/timestamp.proto"; import "google/rpc/status.proto"; +import "google/api/client.proto"; option csharp_namespace = "Google.Cloud.Firestore.V1Beta1"; option go_package = "google.golang.org/genproto/googleapis/firestore/v1beta1;firestore"; @@ -33,6 +34,7 @@ option java_outer_classname = "FirestoreProto"; option java_package = "com.google.firestore.v1beta1"; option objc_class_prefix = "GCFS"; option php_namespace = "Google\\Cloud\\Firestore\\V1beta1"; + // Specification of the Firestore API. // The Cloud Firestore service. @@ -52,6 +54,11 @@ option php_namespace = "Google\\Cloud\\Firestore\\V1beta1"; // committed. Any read with an equal or greater `read_time` is guaranteed // to see the effects of the transaction. service Firestore { + option (google.api.default_host) = "firestore.googleapis.com"; + option (google.api.oauth_scopes) = + "https://www.googleapis.com/auth/cloud-platform," + "https://www.googleapis.com/auth/datastore"; + // Gets a single document. 
rpc GetDocument(GetDocumentRequest) returns (Document) { option (google.api.http) = { @@ -654,14 +661,8 @@ message Target { google.protobuf.Timestamp read_time = 11; } - // A client provided target ID. - // - // If not set, the server will assign an ID for the target. - // - // Used for resuming a target without changing IDs. The IDs can either be - // client-assigned or be server-assigned in a previous stream. All targets - // with client provided IDs must be added before adding a target that needs - // a server-assigned id. + // The target ID that identifies the target on the stream. Must be a positive + // number and non-zero. int32 target_id = 5; // If the target should be removed once it is current and consistent. @@ -706,11 +707,7 @@ message TargetChange { // // If empty, the change applies to all targets. // - // For `target_change_type=ADD`, the order of the target IDs matches the order - // of the requests to add the targets. This allows clients to unambiguously - // associate server-assigned target IDs with added targets. - // - // For other states, the order of the target IDs is not defined. + // The order of the target IDs is not defined. repeated int32 target_ids = 2; // The error that resulted in this change, if applicable. 
diff --git a/firestore/google/cloud/firestore_v1beta1/proto/firestore_pb2.py b/firestore/google/cloud/firestore_v1beta1/proto/firestore_pb2.py index 301347f11d08..5c58fefeefdb 100644 --- a/firestore/google/cloud/firestore_v1beta1/proto/firestore_pb2.py +++ b/firestore/google/cloud/firestore_v1beta1/proto/firestore_pb2.py @@ -31,6 +31,7 @@ from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 DESCRIPTOR = _descriptor.FileDescriptor( @@ -41,7 +42,7 @@ "\n\034com.google.firestore.v1beta1B\016FirestoreProtoP\001ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\242\002\004GCFS\252\002\036Google.Cloud.Firestore.V1Beta1\312\002\036Google\\Cloud\\Firestore\\V1beta1" ), serialized_pb=_b( - '\n4google/cloud/firestore_v1beta1/proto/firestore.proto\x12\x18google.firestore.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x31google/cloud/firestore_v1beta1/proto/common.proto\x1a\x33google/cloud/firestore_v1beta1/proto/document.proto\x1a\x30google/cloud/firestore_v1beta1/proto/query.proto\x1a\x30google/cloud/firestore_v1beta1/proto/write.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto"\xb8\x01\n\x12GetDocumentRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x34\n\x04mask\x18\x02 \x01(\x0b\x32&.google.firestore.v1beta1.DocumentMask\x12\x15\n\x0btransaction\x18\x03 \x01(\x0cH\x00\x12/\n\tread_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x42\x16\n\x14\x63onsistency_selector"\xa2\x02\n\x14ListDocumentsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x15\n\rcollection_id\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t\x12\x10\n\x08order_by\x18\x06 \x01(\t\x12\x34\n\x04mask\x18\x07 
\x01(\x0b\x32&.google.firestore.v1beta1.DocumentMask\x12\x15\n\x0btransaction\x18\x08 \x01(\x0cH\x00\x12/\n\tread_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x14\n\x0cshow_missing\x18\x0c \x01(\x08\x42\x16\n\x14\x63onsistency_selector"g\n\x15ListDocumentsResponse\x12\x35\n\tdocuments\x18\x01 \x03(\x0b\x32".google.firestore.v1beta1.Document\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\xbf\x01\n\x15\x43reateDocumentRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x15\n\rcollection_id\x18\x02 \x01(\t\x12\x13\n\x0b\x64ocument_id\x18\x03 \x01(\t\x12\x34\n\x08\x64ocument\x18\x04 \x01(\x0b\x32".google.firestore.v1beta1.Document\x12\x34\n\x04mask\x18\x05 \x01(\x0b\x32&.google.firestore.v1beta1.DocumentMask"\x82\x02\n\x15UpdateDocumentRequest\x12\x34\n\x08\x64ocument\x18\x01 \x01(\x0b\x32".google.firestore.v1beta1.Document\x12;\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32&.google.firestore.v1beta1.DocumentMask\x12\x34\n\x04mask\x18\x03 \x01(\x0b\x32&.google.firestore.v1beta1.DocumentMask\x12@\n\x10\x63urrent_document\x18\x04 \x01(\x0b\x32&.google.firestore.v1beta1.Precondition"g\n\x15\x44\x65leteDocumentRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12@\n\x10\x63urrent_document\x18\x02 \x01(\x0b\x32&.google.firestore.v1beta1.Precondition"\x9e\x02\n\x18\x42\x61tchGetDocumentsRequest\x12\x10\n\x08\x64\x61tabase\x18\x01 \x01(\t\x12\x11\n\tdocuments\x18\x02 \x03(\t\x12\x34\n\x04mask\x18\x03 \x01(\x0b\x32&.google.firestore.v1beta1.DocumentMask\x12\x15\n\x0btransaction\x18\x04 \x01(\x0cH\x00\x12G\n\x0fnew_transaction\x18\x05 \x01(\x0b\x32,.google.firestore.v1beta1.TransactionOptionsH\x00\x12/\n\tread_time\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x42\x16\n\x14\x63onsistency_selector"\xb1\x01\n\x19\x42\x61tchGetDocumentsResponse\x12\x33\n\x05\x66ound\x18\x01 \x01(\x0b\x32".google.firestore.v1beta1.DocumentH\x00\x12\x11\n\x07missing\x18\x02 \x01(\tH\x00\x12\x13\n\x0btransaction\x18\x03 \x01(\x0c\x12-\n\tread_time\x18\x04 
\x01(\x0b\x32\x1a.google.protobuf.TimestampB\x08\n\x06result"j\n\x17\x42\x65ginTransactionRequest\x12\x10\n\x08\x64\x61tabase\x18\x01 \x01(\t\x12=\n\x07options\x18\x02 \x01(\x0b\x32,.google.firestore.v1beta1.TransactionOptions"/\n\x18\x42\x65ginTransactionResponse\x12\x13\n\x0btransaction\x18\x01 \x01(\x0c"g\n\rCommitRequest\x12\x10\n\x08\x64\x61tabase\x18\x01 \x01(\t\x12/\n\x06writes\x18\x02 \x03(\x0b\x32\x1f.google.firestore.v1beta1.Write\x12\x13\n\x0btransaction\x18\x03 \x01(\x0c"\x7f\n\x0e\x43ommitResponse\x12<\n\rwrite_results\x18\x01 \x03(\x0b\x32%.google.firestore.v1beta1.WriteResult\x12/\n\x0b\x63ommit_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"8\n\x0fRollbackRequest\x12\x10\n\x08\x64\x61tabase\x18\x01 \x01(\t\x12\x13\n\x0btransaction\x18\x02 \x01(\x0c"\x9f\x02\n\x0fRunQueryRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x45\n\x10structured_query\x18\x02 \x01(\x0b\x32).google.firestore.v1beta1.StructuredQueryH\x00\x12\x15\n\x0btransaction\x18\x05 \x01(\x0cH\x01\x12G\n\x0fnew_transaction\x18\x06 \x01(\x0b\x32,.google.firestore.v1beta1.TransactionOptionsH\x01\x12/\n\tread_time\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x01\x42\x0c\n\nquery_typeB\x16\n\x14\x63onsistency_selector"\xa5\x01\n\x10RunQueryResponse\x12\x13\n\x0btransaction\x18\x02 \x01(\x0c\x12\x34\n\x08\x64ocument\x18\x01 \x01(\x0b\x32".google.firestore.v1beta1.Document\x12-\n\tread_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x17\n\x0fskipped_results\x18\x04 \x01(\x05"\xed\x01\n\x0cWriteRequest\x12\x10\n\x08\x64\x61tabase\x18\x01 \x01(\t\x12\x11\n\tstream_id\x18\x02 \x01(\t\x12/\n\x06writes\x18\x03 \x03(\x0b\x32\x1f.google.firestore.v1beta1.Write\x12\x14\n\x0cstream_token\x18\x04 \x01(\x0c\x12\x42\n\x06labels\x18\x05 \x03(\x0b\x32\x32.google.firestore.v1beta1.WriteRequest.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xa7\x01\n\rWriteResponse\x12\x11\n\tstream_id\x18\x01 
\x01(\t\x12\x14\n\x0cstream_token\x18\x02 \x01(\x0c\x12<\n\rwrite_results\x18\x03 \x03(\x0b\x32%.google.firestore.v1beta1.WriteResult\x12/\n\x0b\x63ommit_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"\xf7\x01\n\rListenRequest\x12\x10\n\x08\x64\x61tabase\x18\x01 \x01(\t\x12\x36\n\nadd_target\x18\x02 \x01(\x0b\x32 .google.firestore.v1beta1.TargetH\x00\x12\x17\n\rremove_target\x18\x03 \x01(\x05H\x00\x12\x43\n\x06labels\x18\x04 \x03(\x0b\x32\x33.google.firestore.v1beta1.ListenRequest.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x0f\n\rtarget_change"\xee\x02\n\x0eListenResponse\x12?\n\rtarget_change\x18\x02 \x01(\x0b\x32&.google.firestore.v1beta1.TargetChangeH\x00\x12\x43\n\x0f\x64ocument_change\x18\x03 \x01(\x0b\x32(.google.firestore.v1beta1.DocumentChangeH\x00\x12\x43\n\x0f\x64ocument_delete\x18\x04 \x01(\x0b\x32(.google.firestore.v1beta1.DocumentDeleteH\x00\x12\x43\n\x0f\x64ocument_remove\x18\x06 \x01(\x0b\x32(.google.firestore.v1beta1.DocumentRemoveH\x00\x12;\n\x06\x66ilter\x18\x05 \x01(\x0b\x32).google.firestore.v1beta1.ExistenceFilterH\x00\x42\x0f\n\rresponse_type"\xb0\x03\n\x06Target\x12=\n\x05query\x18\x02 \x01(\x0b\x32,.google.firestore.v1beta1.Target.QueryTargetH\x00\x12\x45\n\tdocuments\x18\x03 \x01(\x0b\x32\x30.google.firestore.v1beta1.Target.DocumentsTargetH\x00\x12\x16\n\x0cresume_token\x18\x04 \x01(\x0cH\x01\x12/\n\tread_time\x18\x0b \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x01\x12\x11\n\ttarget_id\x18\x05 \x01(\x05\x12\x0c\n\x04once\x18\x06 \x01(\x08\x1a$\n\x0f\x44ocumentsTarget\x12\x11\n\tdocuments\x18\x02 \x03(\t\x1ar\n\x0bQueryTarget\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x45\n\x10structured_query\x18\x02 \x01(\x0b\x32).google.firestore.v1beta1.StructuredQueryH\x00\x42\x0c\n\nquery_typeB\r\n\x0btarget_typeB\r\n\x0bresume_type"\xaf\x02\n\x0cTargetChange\x12S\n\x12target_change_type\x18\x01 
\x01(\x0e\x32\x37.google.firestore.v1beta1.TargetChange.TargetChangeType\x12\x12\n\ntarget_ids\x18\x02 \x03(\x05\x12!\n\x05\x63\x61use\x18\x03 \x01(\x0b\x32\x12.google.rpc.Status\x12\x14\n\x0cresume_token\x18\x04 \x01(\x0c\x12-\n\tread_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"N\n\x10TargetChangeType\x12\r\n\tNO_CHANGE\x10\x00\x12\x07\n\x03\x41\x44\x44\x10\x01\x12\n\n\x06REMOVE\x10\x02\x12\x0b\n\x07\x43URRENT\x10\x03\x12\t\n\x05RESET\x10\x04"Q\n\x18ListCollectionIdsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"L\n\x19ListCollectionIdsResponse\x12\x16\n\x0e\x63ollection_ids\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t2\xc8\x13\n\tFirestore\x12\x9e\x01\n\x0bGetDocument\x12,.google.firestore.v1beta1.GetDocumentRequest\x1a".google.firestore.v1beta1.Document"=\x82\xd3\xe4\x93\x02\x37\x12\x35/v1beta1/{name=projects/*/databases/*/documents/*/**}\x12\xc1\x01\n\rListDocuments\x12..google.firestore.v1beta1.ListDocumentsRequest\x1a/.google.firestore.v1beta1.ListDocumentsResponse"O\x82\xd3\xe4\x93\x02I\x12G/v1beta1/{parent=projects/*/databases/*/documents/*/**}/{collection_id}\x12\xbe\x01\n\x0e\x43reateDocument\x12/.google.firestore.v1beta1.CreateDocumentRequest\x1a".google.firestore.v1beta1.Document"W\x82\xd3\xe4\x93\x02Q"E/v1beta1/{parent=projects/*/databases/*/documents/**}/{collection_id}:\x08\x64ocument\x12\xb7\x01\n\x0eUpdateDocument\x12/.google.firestore.v1beta1.UpdateDocumentRequest\x1a".google.firestore.v1beta1.Document"P\x82\xd3\xe4\x93\x02J2>/v1beta1/{document.name=projects/*/databases/*/documents/*/**}:\x08\x64ocument\x12\x98\x01\n\x0e\x44\x65leteDocument\x12/.google.firestore.v1beta1.DeleteDocumentRequest\x1a\x16.google.protobuf.Empty"=\x82\xd3\xe4\x93\x02\x37*5/v1beta1/{name=projects/*/databases/*/documents/*/**}\x12\xc8\x01\n\x11\x42\x61tchGetDocuments\x12\x32.google.firestore.v1beta1.BatchGetDocumentsRequest\x1a\x33.google.firestore.v1beta1.BatchGetDocuments
Response"H\x82\xd3\xe4\x93\x02\x42"=/v1beta1/{database=projects/*/databases/*}/documents:batchGet:\x01*0\x01\x12\xcb\x01\n\x10\x42\x65ginTransaction\x12\x31.google.firestore.v1beta1.BeginTransactionRequest\x1a\x32.google.firestore.v1beta1.BeginTransactionResponse"P\x82\xd3\xe4\x93\x02J"E/v1beta1/{database=projects/*/databases/*}/documents:beginTransaction:\x01*\x12\xa3\x01\n\x06\x43ommit\x12\'.google.firestore.v1beta1.CommitRequest\x1a(.google.firestore.v1beta1.CommitResponse"F\x82\xd3\xe4\x93\x02@";/v1beta1/{database=projects/*/databases/*}/documents:commit:\x01*\x12\x97\x01\n\x08Rollback\x12).google.firestore.v1beta1.RollbackRequest\x1a\x16.google.protobuf.Empty"H\x82\xd3\xe4\x93\x02\x42"=/v1beta1/{database=projects/*/databases/*}/documents:rollback:\x01*\x12\xf4\x01\n\x08RunQuery\x12).google.firestore.v1beta1.RunQueryRequest\x1a*.google.firestore.v1beta1.RunQueryResponse"\x8e\x01\x82\xd3\xe4\x93\x02\x87\x01";/v1beta1/{parent=projects/*/databases/*/documents}:runQuery:\x01*ZE"@/v1beta1/{parent=projects/*/databases/*/documents/*/**}:runQuery:\x01*0\x01\x12\xa3\x01\n\x05Write\x12&.google.firestore.v1beta1.WriteRequest\x1a\'.google.firestore.v1beta1.WriteResponse"E\x82\xd3\xe4\x93\x02?":/v1beta1/{database=projects/*/databases/*}/documents:write:\x01*(\x01\x30\x01\x12\xa7\x01\n\x06Listen\x12\'.google.firestore.v1beta1.ListenRequest\x1a(.google.firestore.v1beta1.ListenResponse"F\x82\xd3\xe4\x93\x02@";/v1beta1/{database=projects/*/databases/*}/documents:listen:\x01*(\x01\x30\x01\x12\x9f\x02\n\x11ListCollectionIds\x12\x32.google.firestore.v1beta1.ListCollectionIdsRequest\x1a\x33.google.firestore.v1beta1.ListCollectionIdsResponse"\xa0\x01\x82\xd3\xe4\x93\x02\x99\x01"D/v1beta1/{parent=projects/*/databases/*/documents}:listCollectionIds:\x01*ZN"I/v1beta1/{parent=projects/*/databases/*/documents/*/**}:listCollectionIds:\x01*B\xbc\x01\n\x1c\x63om.google.firestore.v1beta1B\x0e\x46irestoreProtoP\x01ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\xa2\x02\x04G
CFS\xaa\x02\x1eGoogle.Cloud.Firestore.V1Beta1\xca\x02\x1eGoogle\\Cloud\\Firestore\\V1beta1b\x06proto3' + '\n4google/cloud/firestore_v1beta1/proto/firestore.proto\x12\x18google.firestore.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x31google/cloud/firestore_v1beta1/proto/common.proto\x1a\x33google/cloud/firestore_v1beta1/proto/document.proto\x1a\x30google/cloud/firestore_v1beta1/proto/query.proto\x1a\x30google/cloud/firestore_v1beta1/proto/write.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto\x1a\x17google/api/client.proto"\xb8\x01\n\x12GetDocumentRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x34\n\x04mask\x18\x02 \x01(\x0b\x32&.google.firestore.v1beta1.DocumentMask\x12\x15\n\x0btransaction\x18\x03 \x01(\x0cH\x00\x12/\n\tread_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x42\x16\n\x14\x63onsistency_selector"\xa2\x02\n\x14ListDocumentsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x15\n\rcollection_id\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t\x12\x10\n\x08order_by\x18\x06 \x01(\t\x12\x34\n\x04mask\x18\x07 \x01(\x0b\x32&.google.firestore.v1beta1.DocumentMask\x12\x15\n\x0btransaction\x18\x08 \x01(\x0cH\x00\x12/\n\tread_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x14\n\x0cshow_missing\x18\x0c \x01(\x08\x42\x16\n\x14\x63onsistency_selector"g\n\x15ListDocumentsResponse\x12\x35\n\tdocuments\x18\x01 \x03(\x0b\x32".google.firestore.v1beta1.Document\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\xbf\x01\n\x15\x43reateDocumentRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x15\n\rcollection_id\x18\x02 \x01(\t\x12\x13\n\x0b\x64ocument_id\x18\x03 \x01(\t\x12\x34\n\x08\x64ocument\x18\x04 \x01(\x0b\x32".google.firestore.v1beta1.Document\x12\x34\n\x04mask\x18\x05 \x01(\x0b\x32&.google.firestore.v1beta1.DocumentMask"\x82\x02\n\x15UpdateDocumentRequest\x12\x34\n\x08\x64ocument\x18\x01 
\x01(\x0b\x32".google.firestore.v1beta1.Document\x12;\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32&.google.firestore.v1beta1.DocumentMask\x12\x34\n\x04mask\x18\x03 \x01(\x0b\x32&.google.firestore.v1beta1.DocumentMask\x12@\n\x10\x63urrent_document\x18\x04 \x01(\x0b\x32&.google.firestore.v1beta1.Precondition"g\n\x15\x44\x65leteDocumentRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12@\n\x10\x63urrent_document\x18\x02 \x01(\x0b\x32&.google.firestore.v1beta1.Precondition"\x9e\x02\n\x18\x42\x61tchGetDocumentsRequest\x12\x10\n\x08\x64\x61tabase\x18\x01 \x01(\t\x12\x11\n\tdocuments\x18\x02 \x03(\t\x12\x34\n\x04mask\x18\x03 \x01(\x0b\x32&.google.firestore.v1beta1.DocumentMask\x12\x15\n\x0btransaction\x18\x04 \x01(\x0cH\x00\x12G\n\x0fnew_transaction\x18\x05 \x01(\x0b\x32,.google.firestore.v1beta1.TransactionOptionsH\x00\x12/\n\tread_time\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x42\x16\n\x14\x63onsistency_selector"\xb1\x01\n\x19\x42\x61tchGetDocumentsResponse\x12\x33\n\x05\x66ound\x18\x01 \x01(\x0b\x32".google.firestore.v1beta1.DocumentH\x00\x12\x11\n\x07missing\x18\x02 \x01(\tH\x00\x12\x13\n\x0btransaction\x18\x03 \x01(\x0c\x12-\n\tread_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x08\n\x06result"j\n\x17\x42\x65ginTransactionRequest\x12\x10\n\x08\x64\x61tabase\x18\x01 \x01(\t\x12=\n\x07options\x18\x02 \x01(\x0b\x32,.google.firestore.v1beta1.TransactionOptions"/\n\x18\x42\x65ginTransactionResponse\x12\x13\n\x0btransaction\x18\x01 \x01(\x0c"g\n\rCommitRequest\x12\x10\n\x08\x64\x61tabase\x18\x01 \x01(\t\x12/\n\x06writes\x18\x02 \x03(\x0b\x32\x1f.google.firestore.v1beta1.Write\x12\x13\n\x0btransaction\x18\x03 \x01(\x0c"\x7f\n\x0e\x43ommitResponse\x12<\n\rwrite_results\x18\x01 \x03(\x0b\x32%.google.firestore.v1beta1.WriteResult\x12/\n\x0b\x63ommit_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"8\n\x0fRollbackRequest\x12\x10\n\x08\x64\x61tabase\x18\x01 \x01(\t\x12\x13\n\x0btransaction\x18\x02 
\x01(\x0c"\x9f\x02\n\x0fRunQueryRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x45\n\x10structured_query\x18\x02 \x01(\x0b\x32).google.firestore.v1beta1.StructuredQueryH\x00\x12\x15\n\x0btransaction\x18\x05 \x01(\x0cH\x01\x12G\n\x0fnew_transaction\x18\x06 \x01(\x0b\x32,.google.firestore.v1beta1.TransactionOptionsH\x01\x12/\n\tread_time\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x01\x42\x0c\n\nquery_typeB\x16\n\x14\x63onsistency_selector"\xa5\x01\n\x10RunQueryResponse\x12\x13\n\x0btransaction\x18\x02 \x01(\x0c\x12\x34\n\x08\x64ocument\x18\x01 \x01(\x0b\x32".google.firestore.v1beta1.Document\x12-\n\tread_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x17\n\x0fskipped_results\x18\x04 \x01(\x05"\xed\x01\n\x0cWriteRequest\x12\x10\n\x08\x64\x61tabase\x18\x01 \x01(\t\x12\x11\n\tstream_id\x18\x02 \x01(\t\x12/\n\x06writes\x18\x03 \x03(\x0b\x32\x1f.google.firestore.v1beta1.Write\x12\x14\n\x0cstream_token\x18\x04 \x01(\x0c\x12\x42\n\x06labels\x18\x05 \x03(\x0b\x32\x32.google.firestore.v1beta1.WriteRequest.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xa7\x01\n\rWriteResponse\x12\x11\n\tstream_id\x18\x01 \x01(\t\x12\x14\n\x0cstream_token\x18\x02 \x01(\x0c\x12<\n\rwrite_results\x18\x03 \x03(\x0b\x32%.google.firestore.v1beta1.WriteResult\x12/\n\x0b\x63ommit_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"\xf7\x01\n\rListenRequest\x12\x10\n\x08\x64\x61tabase\x18\x01 \x01(\t\x12\x36\n\nadd_target\x18\x02 \x01(\x0b\x32 .google.firestore.v1beta1.TargetH\x00\x12\x17\n\rremove_target\x18\x03 \x01(\x05H\x00\x12\x43\n\x06labels\x18\x04 \x03(\x0b\x32\x33.google.firestore.v1beta1.ListenRequest.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x0f\n\rtarget_change"\xee\x02\n\x0eListenResponse\x12?\n\rtarget_change\x18\x02 \x01(\x0b\x32&.google.firestore.v1beta1.TargetChangeH\x00\x12\x43\n\x0f\x64ocument_change\x18\x03 
\x01(\x0b\x32(.google.firestore.v1beta1.DocumentChangeH\x00\x12\x43\n\x0f\x64ocument_delete\x18\x04 \x01(\x0b\x32(.google.firestore.v1beta1.DocumentDeleteH\x00\x12\x43\n\x0f\x64ocument_remove\x18\x06 \x01(\x0b\x32(.google.firestore.v1beta1.DocumentRemoveH\x00\x12;\n\x06\x66ilter\x18\x05 \x01(\x0b\x32).google.firestore.v1beta1.ExistenceFilterH\x00\x42\x0f\n\rresponse_type"\xb0\x03\n\x06Target\x12=\n\x05query\x18\x02 \x01(\x0b\x32,.google.firestore.v1beta1.Target.QueryTargetH\x00\x12\x45\n\tdocuments\x18\x03 \x01(\x0b\x32\x30.google.firestore.v1beta1.Target.DocumentsTargetH\x00\x12\x16\n\x0cresume_token\x18\x04 \x01(\x0cH\x01\x12/\n\tread_time\x18\x0b \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x01\x12\x11\n\ttarget_id\x18\x05 \x01(\x05\x12\x0c\n\x04once\x18\x06 \x01(\x08\x1a$\n\x0f\x44ocumentsTarget\x12\x11\n\tdocuments\x18\x02 \x03(\t\x1ar\n\x0bQueryTarget\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x45\n\x10structured_query\x18\x02 \x01(\x0b\x32).google.firestore.v1beta1.StructuredQueryH\x00\x42\x0c\n\nquery_typeB\r\n\x0btarget_typeB\r\n\x0bresume_type"\xaf\x02\n\x0cTargetChange\x12S\n\x12target_change_type\x18\x01 \x01(\x0e\x32\x37.google.firestore.v1beta1.TargetChange.TargetChangeType\x12\x12\n\ntarget_ids\x18\x02 \x03(\x05\x12!\n\x05\x63\x61use\x18\x03 \x01(\x0b\x32\x12.google.rpc.Status\x12\x14\n\x0cresume_token\x18\x04 \x01(\x0c\x12-\n\tread_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"N\n\x10TargetChangeType\x12\r\n\tNO_CHANGE\x10\x00\x12\x07\n\x03\x41\x44\x44\x10\x01\x12\n\n\x06REMOVE\x10\x02\x12\x0b\n\x07\x43URRENT\x10\x03\x12\t\n\x05RESET\x10\x04"Q\n\x18ListCollectionIdsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"L\n\x19ListCollectionIdsResponse\x12\x16\n\x0e\x63ollection_ids\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 
\x01(\t2\xc0\x14\n\tFirestore\x12\x9e\x01\n\x0bGetDocument\x12,.google.firestore.v1beta1.GetDocumentRequest\x1a".google.firestore.v1beta1.Document"=\x82\xd3\xe4\x93\x02\x37\x12\x35/v1beta1/{name=projects/*/databases/*/documents/*/**}\x12\xc1\x01\n\rListDocuments\x12..google.firestore.v1beta1.ListDocumentsRequest\x1a/.google.firestore.v1beta1.ListDocumentsResponse"O\x82\xd3\xe4\x93\x02I\x12G/v1beta1/{parent=projects/*/databases/*/documents/*/**}/{collection_id}\x12\xbe\x01\n\x0e\x43reateDocument\x12/.google.firestore.v1beta1.CreateDocumentRequest\x1a".google.firestore.v1beta1.Document"W\x82\xd3\xe4\x93\x02Q"E/v1beta1/{parent=projects/*/databases/*/documents/**}/{collection_id}:\x08\x64ocument\x12\xb7\x01\n\x0eUpdateDocument\x12/.google.firestore.v1beta1.UpdateDocumentRequest\x1a".google.firestore.v1beta1.Document"P\x82\xd3\xe4\x93\x02J2>/v1beta1/{document.name=projects/*/databases/*/documents/*/**}:\x08\x64ocument\x12\x98\x01\n\x0e\x44\x65leteDocument\x12/.google.firestore.v1beta1.DeleteDocumentRequest\x1a\x16.google.protobuf.Empty"=\x82\xd3\xe4\x93\x02\x37*5/v1beta1/{name=projects/*/databases/*/documents/*/**}\x12\xc8\x01\n\x11\x42\x61tchGetDocuments\x12\x32.google.firestore.v1beta1.BatchGetDocumentsRequest\x1a\x33.google.firestore.v1beta1.BatchGetDocumentsResponse"H\x82\xd3\xe4\x93\x02\x42"=/v1beta1/{database=projects/*/databases/*}/documents:batchGet:\x01*0\x01\x12\xcb\x01\n\x10\x42\x65ginTransaction\x12\x31.google.firestore.v1beta1.BeginTransactionRequest\x1a\x32.google.firestore.v1beta1.BeginTransactionResponse"P\x82\xd3\xe4\x93\x02J"E/v1beta1/{database=projects/*/databases/*}/documents:beginTransaction:\x01*\x12\xa3\x01\n\x06\x43ommit\x12\'.google.firestore.v1beta1.CommitRequest\x1a(.google.firestore.v1beta1.CommitResponse"F\x82\xd3\xe4\x93\x02@";/v1beta1/{database=projects/*/databases/*}/documents:commit:\x01*\x12\x97\x01\n\x08Rollback\x12).google.firestore.v1beta1.RollbackRequest\x1a\x16.google.protobuf.Empty"H\x82\xd3\xe4\x93\x02\x42"=/v1beta1/{database=proj
ects/*/databases/*}/documents:rollback:\x01*\x12\xf4\x01\n\x08RunQuery\x12).google.firestore.v1beta1.RunQueryRequest\x1a*.google.firestore.v1beta1.RunQueryResponse"\x8e\x01\x82\xd3\xe4\x93\x02\x87\x01";/v1beta1/{parent=projects/*/databases/*/documents}:runQuery:\x01*ZE"@/v1beta1/{parent=projects/*/databases/*/documents/*/**}:runQuery:\x01*0\x01\x12\xa3\x01\n\x05Write\x12&.google.firestore.v1beta1.WriteRequest\x1a\'.google.firestore.v1beta1.WriteResponse"E\x82\xd3\xe4\x93\x02?":/v1beta1/{database=projects/*/databases/*}/documents:write:\x01*(\x01\x30\x01\x12\xa7\x01\n\x06Listen\x12\'.google.firestore.v1beta1.ListenRequest\x1a(.google.firestore.v1beta1.ListenResponse"F\x82\xd3\xe4\x93\x02@";/v1beta1/{database=projects/*/databases/*}/documents:listen:\x01*(\x01\x30\x01\x12\x9f\x02\n\x11ListCollectionIds\x12\x32.google.firestore.v1beta1.ListCollectionIdsRequest\x1a\x33.google.firestore.v1beta1.ListCollectionIdsResponse"\xa0\x01\x82\xd3\xe4\x93\x02\x99\x01"D/v1beta1/{parent=projects/*/databases/*/documents}:listCollectionIds:\x01*ZN"I/v1beta1/{parent=projects/*/databases/*/documents/*/**}:listCollectionIds:\x01*\x1av\xca\x41\x18\x66irestore.googleapis.com\xd2\x41Xhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/datastoreB\xbc\x01\n\x1c\x63om.google.firestore.v1beta1B\x0e\x46irestoreProtoP\x01ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\xa2\x02\x04GCFS\xaa\x02\x1eGoogle.Cloud.Firestore.V1Beta1\xca\x02\x1eGoogle\\Cloud\\Firestore\\V1beta1b\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, @@ -52,6 +53,7 @@ google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, google_dot_rpc_dot_status__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, ], ) @@ -80,8 +82,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=4614, - serialized_end=4692, + serialized_start=4639, + serialized_end=4717, ) 
_sym_db.RegisterEnumDescriptor(_TARGETCHANGE_TARGETCHANGETYPE) @@ -182,8 +184,8 @@ fields=[], ) ], - serialized_start=404, - serialized_end=588, + serialized_start=429, + serialized_end=613, ) @@ -373,8 +375,8 @@ fields=[], ) ], - serialized_start=591, - serialized_end=881, + serialized_start=616, + serialized_end=906, ) @@ -430,8 +432,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=883, - serialized_end=986, + serialized_start=908, + serialized_end=1011, ) @@ -541,8 +543,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=989, - serialized_end=1180, + serialized_start=1014, + serialized_end=1205, ) @@ -634,8 +636,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1183, - serialized_end=1441, + serialized_start=1208, + serialized_end=1466, ) @@ -691,8 +693,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1443, - serialized_end=1546, + serialized_start=1468, + serialized_end=1571, ) @@ -828,8 +830,8 @@ fields=[], ) ], - serialized_start=1549, - serialized_end=1835, + serialized_start=1574, + serialized_end=1860, ) @@ -929,8 +931,8 @@ fields=[], ) ], - serialized_start=1838, - serialized_end=2015, + serialized_start=1863, + serialized_end=2040, ) @@ -986,8 +988,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2017, - serialized_end=2123, + serialized_start=2042, + serialized_end=2148, ) @@ -1025,8 +1027,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2125, - serialized_end=2172, + serialized_start=2150, + serialized_end=2197, ) @@ -1100,8 +1102,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2174, - serialized_end=2277, + serialized_start=2199, + serialized_end=2302, ) @@ -1157,8 +1159,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2279, - serialized_end=2406, + serialized_start=2304, + serialized_end=2431, ) @@ -1214,8 +1216,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], 
- serialized_start=2408, - serialized_end=2464, + serialized_start=2433, + serialized_end=2489, ) @@ -1340,8 +1342,8 @@ fields=[], ), ], - serialized_start=2467, - serialized_end=2754, + serialized_start=2492, + serialized_end=2779, ) @@ -1433,8 +1435,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2757, - serialized_end=2922, + serialized_start=2782, + serialized_end=2947, ) @@ -1490,8 +1492,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3117, - serialized_end=3162, + serialized_start=3142, + serialized_end=3187, ) _WRITEREQUEST = _descriptor.Descriptor( @@ -1600,8 +1602,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2925, - serialized_end=3162, + serialized_start=2950, + serialized_end=3187, ) @@ -1693,8 +1695,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3165, - serialized_end=3332, + serialized_start=3190, + serialized_end=3357, ) @@ -1750,8 +1752,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3117, - serialized_end=3162, + serialized_start=3142, + serialized_end=3187, ) _LISTENREQUEST = _descriptor.Descriptor( @@ -1850,8 +1852,8 @@ fields=[], ) ], - serialized_start=3335, - serialized_end=3582, + serialized_start=3360, + serialized_end=3607, ) @@ -1969,8 +1971,8 @@ fields=[], ) ], - serialized_start=3585, - serialized_end=3951, + serialized_start=3610, + serialized_end=3976, ) @@ -2008,8 +2010,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4204, - serialized_end=4240, + serialized_start=4229, + serialized_end=4265, ) _TARGET_QUERYTARGET = _descriptor.Descriptor( @@ -2072,8 +2074,8 @@ fields=[], ) ], - serialized_start=4242, - serialized_end=4356, + serialized_start=4267, + serialized_end=4381, ) _TARGET = _descriptor.Descriptor( @@ -2215,8 +2217,8 @@ fields=[], ), ], - serialized_start=3954, - serialized_end=4386, + serialized_start=3979, + serialized_end=4411, ) @@ -2326,8 +2328,8 @@ syntax="proto3", 
extension_ranges=[], oneofs=[], - serialized_start=4389, - serialized_end=4692, + serialized_start=4414, + serialized_end=4717, ) @@ -2401,8 +2403,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4694, - serialized_end=4775, + serialized_start=4719, + serialized_end=4800, ) @@ -2458,8 +2460,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4777, - serialized_end=4853, + serialized_start=4802, + serialized_end=4878, ) _GETDOCUMENTREQUEST.fields_by_name[ @@ -3515,12 +3517,8 @@ Start listening after a specific ``read_time``. The client must know the state of matching documents at this time. target_id: - A client provided target ID. If not set, the server will - assign an ID for the target. Used for resuming a target - without changing IDs. The IDs can either be client-assigned or - be server-assigned in a previous stream. All targets with - client provided IDs must be added before adding a target that - needs a server-assigned id. + The target ID that identifies the target on the stream. Must + be a positive number and non-zero. once: If the target should be removed once it is current and consistent. @@ -3546,12 +3544,8 @@ The type of change that occurred. target_ids: The target IDs of targets that have changed. If empty, the - change applies to all targets. For - ``target_change_type=ADD``, the order of the target IDs - matches the order of the requests to add the targets. This - allows clients to unambiguously associate server-assigned - target IDs with added targets. For other states, the order of - the target IDs is not defined. + change applies to all targets. The order of the target IDs is + not defined. cause: The error that resulted in this change, if applicable. 
resume_token: @@ -3632,9 +3626,11 @@ full_name="google.firestore.v1beta1.Firestore", file=DESCRIPTOR, index=0, - serialized_options=None, - serialized_start=4856, - serialized_end=7360, + serialized_options=_b( + "\312A\030firestore.googleapis.com\322AXhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/datastore" + ), + serialized_start=4881, + serialized_end=7505, methods=[ _descriptor.MethodDescriptor( name="GetDocument", diff --git a/firestore/google/cloud/firestore_v1beta1/proto/firestore_pb2_grpc.py b/firestore/google/cloud/firestore_v1beta1/proto/firestore_pb2_grpc.py index e3bd63b73f35..cf23b20c3884 100644 --- a/firestore/google/cloud/firestore_v1beta1/proto/firestore_pb2_grpc.py +++ b/firestore/google/cloud/firestore_v1beta1/proto/firestore_pb2_grpc.py @@ -11,7 +11,9 @@ class FirestoreStub(object): - """The Cloud Firestore service. + """Specification of the Firestore API. + + The Cloud Firestore service. This service exposes several types of comparable timestamps: @@ -103,7 +105,9 @@ def __init__(self, channel): class FirestoreServicer(object): - """The Cloud Firestore service. + """Specification of the Firestore API. + + The Cloud Firestore service. This service exposes several types of comparable timestamps: diff --git a/firestore/google/cloud/firestore_v1beta1/proto/query.proto b/firestore/google/cloud/firestore_v1beta1/proto/query.proto index 94eec9cbbf3f..a8068ae6c2f3 100644 --- a/firestore/google/cloud/firestore_v1beta1/proto/query.proto +++ b/firestore/google/cloud/firestore_v1beta1/proto/query.proto @@ -29,7 +29,6 @@ option java_package = "com.google.firestore.v1beta1"; option objc_class_prefix = "GCFS"; option php_namespace = "Google\\Cloud\\Firestore\\V1beta1"; - // A Firestore query. message StructuredQuery { // A selection of a collection, such as `messages as m1`. @@ -103,6 +102,14 @@ message StructuredQuery { // Contains. Requires that the field is an array. ARRAY_CONTAINS = 7; + + // In. 
Requires that `value` is a non-empty ArrayValue with at most 10 + // values. + IN = 8; + + // Contains any. Requires that the field is an array and + // `value` is a non-empty ArrayValue with at most 10 values. + ARRAY_CONTAINS_ANY = 9; } // The field to filter by. @@ -115,15 +122,6 @@ message StructuredQuery { Value value = 3; } - // The projection of document's fields to return. - message Projection { - // The fields to return. - // - // If empty, all fields are returned. To only return the name - // of the document, use `['__name__']`. - repeated FieldReference fields = 2; - } - // A filter with a single operand. message UnaryFilter { // A unary operator. @@ -134,7 +132,7 @@ message StructuredQuery { // Test if a field is equal to NaN. IS_NAN = 2; - // Test if an exprestion evaluates to Null. + // Test if an expression evaluates to Null. IS_NULL = 3; } @@ -157,6 +155,20 @@ message StructuredQuery { Direction direction = 2; } + // The projection of document's fields to return. + message Projection { + // The fields to return. + // + // If empty, all fields are returned. To only return the name + // of the document, use `['__name__']`. + repeated FieldReference fields = 2; + } + + // A reference to a field, such as `max(messages.time) as max_time`. + message FieldReference { + string field_path = 2; + } + // A sort direction. enum Direction { // Unspecified. @@ -169,11 +181,6 @@ message StructuredQuery { DESCENDING = 2; } - // A reference to a field, such as `max(messages.time) as max_time`. - message FieldReference { - string field_path = 2; - } - // The projection to return. 
Projection select = 1; diff --git a/firestore/google/cloud/firestore_v1beta1/proto/query_pb2.py b/firestore/google/cloud/firestore_v1beta1/proto/query_pb2.py index 81bc4b3361b6..70c26f514e23 100644 --- a/firestore/google/cloud/firestore_v1beta1/proto/query_pb2.py +++ b/firestore/google/cloud/firestore_v1beta1/proto/query_pb2.py @@ -30,7 +30,7 @@ "\n\034com.google.firestore.v1beta1B\nQueryProtoP\001ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\242\002\004GCFS\252\002\036Google.Cloud.Firestore.V1Beta1\312\002\036Google\\Cloud\\Firestore\\V1beta1" ), serialized_pb=_b( - '\n0google/cloud/firestore_v1beta1/proto/query.proto\x12\x18google.firestore.v1beta1\x1a\x33google/cloud/firestore_v1beta1/proto/document.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x1cgoogle/api/annotations.proto"\xb9\x0f\n\x0fStructuredQuery\x12\x44\n\x06select\x18\x01 \x01(\x0b\x32\x34.google.firestore.v1beta1.StructuredQuery.Projection\x12J\n\x04\x66rom\x18\x02 \x03(\x0b\x32<.google.firestore.v1beta1.StructuredQuery.CollectionSelector\x12?\n\x05where\x18\x03 \x01(\x0b\x32\x30.google.firestore.v1beta1.StructuredQuery.Filter\x12\x41\n\x08order_by\x18\x04 \x03(\x0b\x32/.google.firestore.v1beta1.StructuredQuery.Order\x12\x32\n\x08start_at\x18\x07 \x01(\x0b\x32 .google.firestore.v1beta1.Cursor\x12\x30\n\x06\x65nd_at\x18\x08 \x01(\x0b\x32 .google.firestore.v1beta1.Cursor\x12\x0e\n\x06offset\x18\x06 \x01(\x05\x12*\n\x05limit\x18\x05 \x01(\x0b\x32\x1b.google.protobuf.Int32Value\x1a\x44\n\x12\x43ollectionSelector\x12\x15\n\rcollection_id\x18\x02 \x01(\t\x12\x17\n\x0f\x61ll_descendants\x18\x03 \x01(\x08\x1a\x8c\x02\n\x06\x46ilter\x12U\n\x10\x63omposite_filter\x18\x01 \x01(\x0b\x32\x39.google.firestore.v1beta1.StructuredQuery.CompositeFilterH\x00\x12M\n\x0c\x66ield_filter\x18\x02 \x01(\x0b\x32\x35.google.firestore.v1beta1.StructuredQuery.FieldFilterH\x00\x12M\n\x0cunary_filter\x18\x03 
\x01(\x0b\x32\x35.google.firestore.v1beta1.StructuredQuery.UnaryFilterH\x00\x42\r\n\x0b\x66ilter_type\x1a\xd3\x01\n\x0f\x43ompositeFilter\x12N\n\x02op\x18\x01 \x01(\x0e\x32\x42.google.firestore.v1beta1.StructuredQuery.CompositeFilter.Operator\x12\x41\n\x07\x66ilters\x18\x02 \x03(\x0b\x32\x30.google.firestore.v1beta1.StructuredQuery.Filter"-\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\x07\n\x03\x41ND\x10\x01\x1a\xec\x02\n\x0b\x46ieldFilter\x12G\n\x05\x66ield\x18\x01 \x01(\x0b\x32\x38.google.firestore.v1beta1.StructuredQuery.FieldReference\x12J\n\x02op\x18\x02 \x01(\x0e\x32>.google.firestore.v1beta1.StructuredQuery.FieldFilter.Operator\x12.\n\x05value\x18\x03 \x01(\x0b\x32\x1f.google.firestore.v1beta1.Value"\x97\x01\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\r\n\tLESS_THAN\x10\x01\x12\x16\n\x12LESS_THAN_OR_EQUAL\x10\x02\x12\x10\n\x0cGREATER_THAN\x10\x03\x12\x19\n\x15GREATER_THAN_OR_EQUAL\x10\x04\x12\t\n\x05\x45QUAL\x10\x05\x12\x12\n\x0e\x41RRAY_CONTAINS\x10\x07\x1aV\n\nProjection\x12H\n\x06\x66ields\x18\x02 \x03(\x0b\x32\x38.google.firestore.v1beta1.StructuredQuery.FieldReference\x1a\xf3\x01\n\x0bUnaryFilter\x12J\n\x02op\x18\x01 \x01(\x0e\x32>.google.firestore.v1beta1.StructuredQuery.UnaryFilter.Operator\x12I\n\x05\x66ield\x18\x02 \x01(\x0b\x32\x38.google.firestore.v1beta1.StructuredQuery.FieldReferenceH\x00"=\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\n\n\x06IS_NAN\x10\x02\x12\x0b\n\x07IS_NULL\x10\x03\x42\x0e\n\x0coperand_type\x1a\x98\x01\n\x05Order\x12G\n\x05\x66ield\x18\x01 \x01(\x0b\x32\x38.google.firestore.v1beta1.StructuredQuery.FieldReference\x12\x46\n\tdirection\x18\x02 \x01(\x0e\x32\x33.google.firestore.v1beta1.StructuredQuery.Direction\x1a$\n\x0e\x46ieldReference\x12\x12\n\nfield_path\x18\x02 \x01(\t"E\n\tDirection\x12\x19\n\x15\x44IRECTION_UNSPECIFIED\x10\x00\x12\r\n\tASCENDING\x10\x01\x12\x0e\n\nDESCENDING\x10\x02"I\n\x06\x43ursor\x12/\n\x06values\x18\x01 
\x03(\x0b\x32\x1f.google.firestore.v1beta1.Value\x12\x0e\n\x06\x62\x65\x66ore\x18\x02 \x01(\x08\x42\xb8\x01\n\x1c\x63om.google.firestore.v1beta1B\nQueryProtoP\x01ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\xa2\x02\x04GCFS\xaa\x02\x1eGoogle.Cloud.Firestore.V1Beta1\xca\x02\x1eGoogle\\Cloud\\Firestore\\V1beta1b\x06proto3' + '\n0google/cloud/firestore_v1beta1/proto/query.proto\x12\x18google.firestore.v1beta1\x1a\x33google/cloud/firestore_v1beta1/proto/document.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x1cgoogle/api/annotations.proto"\xd9\x0f\n\x0fStructuredQuery\x12\x44\n\x06select\x18\x01 \x01(\x0b\x32\x34.google.firestore.v1beta1.StructuredQuery.Projection\x12J\n\x04\x66rom\x18\x02 \x03(\x0b\x32<.google.firestore.v1beta1.StructuredQuery.CollectionSelector\x12?\n\x05where\x18\x03 \x01(\x0b\x32\x30.google.firestore.v1beta1.StructuredQuery.Filter\x12\x41\n\x08order_by\x18\x04 \x03(\x0b\x32/.google.firestore.v1beta1.StructuredQuery.Order\x12\x32\n\x08start_at\x18\x07 \x01(\x0b\x32 .google.firestore.v1beta1.Cursor\x12\x30\n\x06\x65nd_at\x18\x08 \x01(\x0b\x32 .google.firestore.v1beta1.Cursor\x12\x0e\n\x06offset\x18\x06 \x01(\x05\x12*\n\x05limit\x18\x05 \x01(\x0b\x32\x1b.google.protobuf.Int32Value\x1a\x44\n\x12\x43ollectionSelector\x12\x15\n\rcollection_id\x18\x02 \x01(\t\x12\x17\n\x0f\x61ll_descendants\x18\x03 \x01(\x08\x1a\x8c\x02\n\x06\x46ilter\x12U\n\x10\x63omposite_filter\x18\x01 \x01(\x0b\x32\x39.google.firestore.v1beta1.StructuredQuery.CompositeFilterH\x00\x12M\n\x0c\x66ield_filter\x18\x02 \x01(\x0b\x32\x35.google.firestore.v1beta1.StructuredQuery.FieldFilterH\x00\x12M\n\x0cunary_filter\x18\x03 \x01(\x0b\x32\x35.google.firestore.v1beta1.StructuredQuery.UnaryFilterH\x00\x42\r\n\x0b\x66ilter_type\x1a\xd3\x01\n\x0f\x43ompositeFilter\x12N\n\x02op\x18\x01 \x01(\x0e\x32\x42.google.firestore.v1beta1.StructuredQuery.CompositeFilter.Operator\x12\x41\n\x07\x66ilters\x18\x02 
\x03(\x0b\x32\x30.google.firestore.v1beta1.StructuredQuery.Filter"-\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\x07\n\x03\x41ND\x10\x01\x1a\x8c\x03\n\x0b\x46ieldFilter\x12G\n\x05\x66ield\x18\x01 \x01(\x0b\x32\x38.google.firestore.v1beta1.StructuredQuery.FieldReference\x12J\n\x02op\x18\x02 \x01(\x0e\x32>.google.firestore.v1beta1.StructuredQuery.FieldFilter.Operator\x12.\n\x05value\x18\x03 \x01(\x0b\x32\x1f.google.firestore.v1beta1.Value"\xb7\x01\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\r\n\tLESS_THAN\x10\x01\x12\x16\n\x12LESS_THAN_OR_EQUAL\x10\x02\x12\x10\n\x0cGREATER_THAN\x10\x03\x12\x19\n\x15GREATER_THAN_OR_EQUAL\x10\x04\x12\t\n\x05\x45QUAL\x10\x05\x12\x12\n\x0e\x41RRAY_CONTAINS\x10\x07\x12\x06\n\x02IN\x10\x08\x12\x16\n\x12\x41RRAY_CONTAINS_ANY\x10\t\x1a\xf3\x01\n\x0bUnaryFilter\x12J\n\x02op\x18\x01 \x01(\x0e\x32>.google.firestore.v1beta1.StructuredQuery.UnaryFilter.Operator\x12I\n\x05\x66ield\x18\x02 \x01(\x0b\x32\x38.google.firestore.v1beta1.StructuredQuery.FieldReferenceH\x00"=\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\n\n\x06IS_NAN\x10\x02\x12\x0b\n\x07IS_NULL\x10\x03\x42\x0e\n\x0coperand_type\x1a\x98\x01\n\x05Order\x12G\n\x05\x66ield\x18\x01 \x01(\x0b\x32\x38.google.firestore.v1beta1.StructuredQuery.FieldReference\x12\x46\n\tdirection\x18\x02 \x01(\x0e\x32\x33.google.firestore.v1beta1.StructuredQuery.Direction\x1aV\n\nProjection\x12H\n\x06\x66ields\x18\x02 \x03(\x0b\x32\x38.google.firestore.v1beta1.StructuredQuery.FieldReference\x1a$\n\x0e\x46ieldReference\x12\x12\n\nfield_path\x18\x02 \x01(\t"E\n\tDirection\x12\x19\n\x15\x44IRECTION_UNSPECIFIED\x10\x00\x12\r\n\tASCENDING\x10\x01\x12\x0e\n\nDESCENDING\x10\x02"I\n\x06\x43ursor\x12/\n\x06values\x18\x01 \x03(\x0b\x32\x1f.google.firestore.v1beta1.Value\x12\x0e\n\x06\x62\x65\x66ore\x18\x02 
\x01(\x08\x42\xb8\x01\n\x1c\x63om.google.firestore.v1beta1B\nQueryProtoP\x01ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\xa2\x02\x04GCFS\xaa\x02\x1eGoogle.Cloud.Firestore.V1Beta1\xca\x02\x1eGoogle\\Cloud\\Firestore\\V1beta1b\x06proto3' ), dependencies=[ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2.DESCRIPTOR, @@ -103,11 +103,21 @@ _descriptor.EnumValueDescriptor( name="ARRAY_CONTAINS", index=6, number=7, serialized_options=None, type=None ), + _descriptor.EnumValueDescriptor( + name="IN", index=7, number=8, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="ARRAY_CONTAINS_ANY", + index=8, + number=9, + serialized_options=None, + type=None, + ), ], containing_type=None, serialized_options=None, serialized_start=1422, - serialized_end=1573, + serialized_end=1605, ) _sym_db.RegisterEnumDescriptor(_STRUCTUREDQUERY_FIELDFILTER_OPERATOR) @@ -133,8 +143,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=1830, - serialized_end=1891, + serialized_start=1774, + serialized_end=1835, ) _sym_db.RegisterEnumDescriptor(_STRUCTUREDQUERY_UNARYFILTER_OPERATOR) @@ -160,8 +170,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=2102, - serialized_end=2171, + serialized_start=2134, + serialized_end=2203, ) _sym_db.RegisterEnumDescriptor(_STRUCTUREDQUERY_DIRECTION) @@ -431,45 +441,7 @@ extension_ranges=[], oneofs=[], serialized_start=1209, - serialized_end=1573, -) - -_STRUCTUREDQUERY_PROJECTION = _descriptor.Descriptor( - name="Projection", - full_name="google.firestore.v1beta1.StructuredQuery.Projection", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="fields", - full_name="google.firestore.v1beta1.StructuredQuery.Projection.fields", - index=0, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - 
is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1575, - serialized_end=1661, + serialized_end=1605, ) _STRUCTUREDQUERY_UNARYFILTER = _descriptor.Descriptor( @@ -532,8 +504,8 @@ fields=[], ) ], - serialized_start=1664, - serialized_end=1907, + serialized_start=1608, + serialized_end=1851, ) _STRUCTUREDQUERY_ORDER = _descriptor.Descriptor( @@ -588,8 +560,46 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1910, - serialized_end=2062, + serialized_start=1854, + serialized_end=2006, +) + +_STRUCTUREDQUERY_PROJECTION = _descriptor.Descriptor( + name="Projection", + full_name="google.firestore.v1beta1.StructuredQuery.Projection", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="fields", + full_name="google.firestore.v1beta1.StructuredQuery.Projection.fields", + index=0, + number=2, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2008, + serialized_end=2094, ) _STRUCTUREDQUERY_FIELDREFERENCE = _descriptor.Descriptor( @@ -626,8 +636,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2064, - serialized_end=2100, + serialized_start=2096, + serialized_end=2132, ) _STRUCTUREDQUERY = _descriptor.Descriptor( @@ -788,9 +798,9 @@ _STRUCTUREDQUERY_FILTER, _STRUCTUREDQUERY_COMPOSITEFILTER, _STRUCTUREDQUERY_FIELDFILTER, - _STRUCTUREDQUERY_PROJECTION, _STRUCTUREDQUERY_UNARYFILTER, 
_STRUCTUREDQUERY_ORDER, + _STRUCTUREDQUERY_PROJECTION, _STRUCTUREDQUERY_FIELDREFERENCE, ], enum_types=[_STRUCTUREDQUERY_DIRECTION], @@ -800,7 +810,7 @@ extension_ranges=[], oneofs=[], serialized_start=194, - serialized_end=2171, + serialized_end=2203, ) @@ -856,8 +866,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2173, - serialized_end=2246, + serialized_start=2205, + serialized_end=2278, ) _STRUCTUREDQUERY_COLLECTIONSELECTOR.containing_type = _STRUCTUREDQUERY @@ -912,10 +922,6 @@ ) _STRUCTUREDQUERY_FIELDFILTER.containing_type = _STRUCTUREDQUERY _STRUCTUREDQUERY_FIELDFILTER_OPERATOR.containing_type = _STRUCTUREDQUERY_FIELDFILTER -_STRUCTUREDQUERY_PROJECTION.fields_by_name[ - "fields" -].message_type = _STRUCTUREDQUERY_FIELDREFERENCE -_STRUCTUREDQUERY_PROJECTION.containing_type = _STRUCTUREDQUERY _STRUCTUREDQUERY_UNARYFILTER.fields_by_name[ "op" ].enum_type = _STRUCTUREDQUERY_UNARYFILTER_OPERATOR @@ -937,6 +943,10 @@ "direction" ].enum_type = _STRUCTUREDQUERY_DIRECTION _STRUCTUREDQUERY_ORDER.containing_type = _STRUCTUREDQUERY +_STRUCTUREDQUERY_PROJECTION.fields_by_name[ + "fields" +].message_type = _STRUCTUREDQUERY_FIELDREFERENCE +_STRUCTUREDQUERY_PROJECTION.containing_type = _STRUCTUREDQUERY _STRUCTUREDQUERY_FIELDREFERENCE.containing_type = _STRUCTUREDQUERY _STRUCTUREDQUERY.fields_by_name["select"].message_type = _STRUCTUREDQUERY_PROJECTION _STRUCTUREDQUERY.fields_by_name[ @@ -1046,23 +1056,6 @@ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.StructuredQuery.FieldFilter) ), ), - Projection=_reflection.GeneratedProtocolMessageType( - "Projection", - (_message.Message,), - dict( - DESCRIPTOR=_STRUCTUREDQUERY_PROJECTION, - __module__="google.cloud.firestore_v1beta1.proto.query_pb2", - __doc__="""The projection of document's fields to return. - - - Attributes: - fields: - The fields to return. If empty, all fields are returned. To - only return the name of the document, use ``['__name__']``. 
- """, - # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.StructuredQuery.Projection) - ), - ), UnaryFilter=_reflection.GeneratedProtocolMessageType( "UnaryFilter", (_message.Message,), @@ -1101,6 +1094,23 @@ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.StructuredQuery.Order) ), ), + Projection=_reflection.GeneratedProtocolMessageType( + "Projection", + (_message.Message,), + dict( + DESCRIPTOR=_STRUCTUREDQUERY_PROJECTION, + __module__="google.cloud.firestore_v1beta1.proto.query_pb2", + __doc__="""The projection of document's fields to return. + + + Attributes: + fields: + The fields to return. If empty, all fields are returned. To + only return the name of the document, use ``['__name__']``. + """, + # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.StructuredQuery.Projection) + ), + ), FieldReference=_reflection.GeneratedProtocolMessageType( "FieldReference", (_message.Message,), @@ -1158,9 +1168,9 @@ _sym_db.RegisterMessage(StructuredQuery.Filter) _sym_db.RegisterMessage(StructuredQuery.CompositeFilter) _sym_db.RegisterMessage(StructuredQuery.FieldFilter) -_sym_db.RegisterMessage(StructuredQuery.Projection) _sym_db.RegisterMessage(StructuredQuery.UnaryFilter) _sym_db.RegisterMessage(StructuredQuery.Order) +_sym_db.RegisterMessage(StructuredQuery.Projection) _sym_db.RegisterMessage(StructuredQuery.FieldReference) Cursor = _reflection.GeneratedProtocolMessageType( diff --git a/firestore/google/cloud/firestore_v1beta1/proto/write.proto b/firestore/google/cloud/firestore_v1beta1/proto/write.proto index 4e58cc1216e1..c02a2a8a1ac1 100644 --- a/firestore/google/cloud/firestore_v1beta1/proto/write.proto +++ b/firestore/google/cloud/firestore_v1beta1/proto/write.proto @@ -30,7 +30,6 @@ option java_package = "com.google.firestore.v1beta1"; option objc_class_prefix = "GCFS"; option php_namespace = "Google\\Cloud\\Firestore\\V1beta1"; - // A write on a document. message Write { // The operation to execute. 
diff --git a/firestore/setup.py b/firestore/setup.py index 0c736ab3c028..6a114864beaf 100644 --- a/firestore/setup.py +++ b/firestore/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-firestore" description = "Google Cloud Firestore API client library" -version = "1.4.0" +version = "1.6.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' @@ -30,7 +30,7 @@ release_status = "Development Status :: 4 - Beta" dependencies = [ "google-api-core[grpc] >= 1.14.0, < 2.0.0dev", - "google-cloud-core >= 1.0.0, < 2.0dev", + "google-cloud-core >= 1.0.3, < 2.0dev", "pytz", ] extras = {} diff --git a/firestore/synth.metadata b/firestore/synth.metadata index a3d73c559f1a..e22035e702bf 100644 --- a/firestore/synth.metadata +++ b/firestore/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-09-04T12:19:37.793382Z", + "updateTime": "2019-10-10T12:25:00.305808Z", "sources": [ { "generator": { "name": "artman", - "version": "0.36.2", - "dockerImage": "googleapis/artman@sha256:0e6f3a668cd68afc768ecbe08817cf6e56a0e64fcbdb1c58c3b97492d12418a1" + "version": "0.38.0", + "dockerImage": "googleapis/artman@sha256:0d2f8d429110aeb8d82df6550ef4ede59d40df9062d260a1580fce688b0512bf" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "a2158681f6e30c5fd9446eb1fd7b5021a6d48bfa", - "internalRef": "266999433" + "sha": "10f91fa12f70e8e0209a45fc10807ed1f77c7e4e", + "internalRef": "273826591" } }, { diff --git a/firestore/tests/system/test_system.py b/firestore/tests/system/test_system.py index f2d30c94a171..71ac07fcee74 100644 --- a/firestore/tests/system/test_system.py +++ b/firestore/tests/system/test_system.py @@ -492,11 +492,13 @@ def test_collection_add(client, cleanup): assert set(collection3.list_documents()) == {document_ref5} -def test_query_stream(client, cleanup): +@pytest.fixture +def query_docs(client): collection_id = "qs" + UNIQUE_RESOURCE_ID sub_collection = "child" + UNIQUE_RESOURCE_ID 
collection = client.collection(collection_id, "doc", sub_collection) + cleanup = [] stored = {} num_vals = 5 allowed_vals = six.moves.xrange(num_vals) @@ -505,38 +507,82 @@ def test_query_stream(client, cleanup): document_data = { "a": a_val, "b": b_val, + "c": [a_val, num_vals * 100], "stats": {"sum": a_val + b_val, "product": a_val * b_val}, } _, doc_ref = collection.add(document_data) # Add to clean-up. - cleanup(doc_ref.delete) + cleanup.append(doc_ref.delete) stored[doc_ref.id] = document_data - # 0. Limit to snapshots where ``a==1``. - query0 = collection.where("a", "==", 1) - values0 = {snapshot.id: snapshot.to_dict() for snapshot in query0.stream()} - assert len(values0) == num_vals - for key, value in six.iteritems(values0): + yield collection, stored, allowed_vals + + for operation in cleanup: + operation() + + +def test_query_stream_w_simple_field_eq_op(query_docs): + collection, stored, allowed_vals = query_docs + query = collection.where("a", "==", 1) + values = {snapshot.id: snapshot.to_dict() for snapshot in query.stream()} + assert len(values) == len(allowed_vals) + for key, value in six.iteritems(values): + assert stored[key] == value + assert value["a"] == 1 + + +def test_query_stream_w_simple_field_array_contains_op(query_docs): + collection, stored, allowed_vals = query_docs + query = collection.where("c", "array_contains", 1) + values = {snapshot.id: snapshot.to_dict() for snapshot in query.stream()} + assert len(values) == len(allowed_vals) + for key, value in six.iteritems(values): + assert stored[key] == value + assert value["a"] == 1 + + +def test_query_stream_w_simple_field_in_op(query_docs): + collection, stored, allowed_vals = query_docs + num_vals = len(allowed_vals) + query = collection.where("a", "in", [1, num_vals + 100]) + values = {snapshot.id: snapshot.to_dict() for snapshot in query.stream()} + assert len(values) == len(allowed_vals) + for key, value in six.iteritems(values): assert stored[key] == value assert value["a"] == 1 - # 
1. Order by ``b``. - query1 = collection.order_by("b", direction=query0.DESCENDING) - values1 = [(snapshot.id, snapshot.to_dict()) for snapshot in query1.stream()] - assert len(values1) == len(stored) - b_vals1 = [] - for key, value in values1: + +def test_query_stream_w_simple_field_array_contains_any_op(query_docs): + collection, stored, allowed_vals = query_docs + num_vals = len(allowed_vals) + query = collection.where("c", "array_contains_any", [1, num_vals * 200]) + values = {snapshot.id: snapshot.to_dict() for snapshot in query.stream()} + assert len(values) == len(allowed_vals) + for key, value in six.iteritems(values): assert stored[key] == value - b_vals1.append(value["b"]) + assert value["a"] == 1 + + +def test_query_stream_w_order_by(query_docs): + collection, stored, allowed_vals = query_docs + query = collection.order_by("b", direction=firestore.Query.DESCENDING) + values = [(snapshot.id, snapshot.to_dict()) for snapshot in query.stream()] + assert len(values) == len(stored) + b_vals = [] + for key, value in values: + assert stored[key] == value + b_vals.append(value["b"]) # Make sure the ``b``-values are in DESCENDING order. - assert sorted(b_vals1, reverse=True) == b_vals1 + assert sorted(b_vals, reverse=True) == b_vals + - # 2. Limit to snapshots where ``stats.sum > 1`` (a field path). 
- query2 = collection.where("stats.sum", ">", 4) - values2 = {snapshot.id: snapshot.to_dict() for snapshot in query2.stream()} - assert len(values2) == 10 +def test_query_stream_w_field_path(query_docs): + collection, stored, allowed_vals = query_docs + query = collection.where("stats.sum", ">", 4) + values = {snapshot.id: snapshot.to_dict() for snapshot in query.stream()} + assert len(values) == 10 ab_pairs2 = set() - for key, value in six.iteritems(values2): + for key, value in six.iteritems(values): assert stored[key] == value ab_pairs2.add((value["a"], value["b"])) @@ -550,63 +596,72 @@ def test_query_stream(client, cleanup): ) assert expected_ab_pairs == ab_pairs2 - # 3. Use a start and end cursor. - query3 = ( + +def test_query_stream_w_start_end_cursor(query_docs): + collection, stored, allowed_vals = query_docs + num_vals = len(allowed_vals) + query = ( collection.order_by("a") .start_at({"a": num_vals - 2}) .end_before({"a": num_vals - 1}) ) - values3 = [(snapshot.id, snapshot.to_dict()) for snapshot in query3.stream()] - assert len(values3) == num_vals - for key, value in values3: + values = [(snapshot.id, snapshot.to_dict()) for snapshot in query.stream()] + assert len(values) == num_vals + for key, value in values: assert stored[key] == value assert value["a"] == num_vals - 2 - b_vals1.append(value["b"]) - - # 4. Send a query with no results. - query4 = collection.where("b", "==", num_vals + 100) - values4 = list(query4.stream()) - assert len(values4) == 0 - - # 5. Select a subset of fields. 
- query5 = collection.where("b", "<=", 1) - query5 = query5.select(["a", "stats.product"]) - values5 = {snapshot.id: snapshot.to_dict() for snapshot in query5.stream()} - assert len(values5) == num_vals * 2 # a ANY, b in (0, 1) - for key, value in six.iteritems(values5): + + +def test_query_stream_wo_results(query_docs): + collection, stored, allowed_vals = query_docs + num_vals = len(allowed_vals) + query = collection.where("b", "==", num_vals + 100) + values = list(query.stream()) + assert len(values) == 0 + + +def test_query_stream_w_projection(query_docs): + collection, stored, allowed_vals = query_docs + num_vals = len(allowed_vals) + query = collection.where("b", "<=", 1).select(["a", "stats.product"]) + values = {snapshot.id: snapshot.to_dict() for snapshot in query.stream()} + assert len(values) == num_vals * 2 # a ANY, b in (0, 1) + for key, value in six.iteritems(values): expected = { "a": stored[key]["a"], "stats": {"product": stored[key]["stats"]["product"]}, } assert expected == value - # 6. Add multiple filters via ``where()``. - query6 = collection.where("stats.product", ">", 5) - query6 = query6.where("stats.product", "<", 10) - values6 = {snapshot.id: snapshot.to_dict() for snapshot in query6.stream()} +def test_query_stream_w_multiple_filters(query_docs): + collection, stored, allowed_vals = query_docs + query = collection.where("stats.product", ">", 5).where("stats.product", "<", 10) + values = {snapshot.id: snapshot.to_dict() for snapshot in query.stream()} matching_pairs = [ (a_val, b_val) for a_val in allowed_vals for b_val in allowed_vals if 5 < a_val * b_val < 10 ] - assert len(values6) == len(matching_pairs) - for key, value in six.iteritems(values6): + assert len(values) == len(matching_pairs) + for key, value in six.iteritems(values): assert stored[key] == value pair = (value["a"], value["b"]) assert pair in matching_pairs - # 7. 
Skip the first three results, when ``b==2`` - query7 = collection.where("b", "==", 2) + +def test_query_stream_w_offset(query_docs): + collection, stored, allowed_vals = query_docs + num_vals = len(allowed_vals) offset = 3 - query7 = query7.offset(offset) - values7 = {snapshot.id: snapshot.to_dict() for snapshot in query7.stream()} + query = collection.where("b", "==", 2).offset(offset) + values = {snapshot.id: snapshot.to_dict() for snapshot in query.stream()} # NOTE: We don't check the ``a``-values, since that would require # an ``order_by('a')``, which combined with the ``b == 2`` # filter would necessitate an index. - assert len(values7) == num_vals - offset - for key, value in six.iteritems(values7): + assert len(values) == num_vals - offset + for key, value in six.iteritems(values): assert stored[key] == value assert value["b"] == 2 diff --git a/firestore/tests/unit/v1/test_collection.py b/firestore/tests/unit/v1/test_collection.py index 213b32e13a85..fde538b9db9c 100644 --- a/firestore/tests/unit/v1/test_collection.py +++ b/firestore/tests/unit/v1/test_collection.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -import datetime import types import unittest @@ -193,7 +192,7 @@ def test_add_auto_assigned(self): from google.cloud.firestore_v1.proto import document_pb2 from google.cloud.firestore_v1.document import DocumentReference from google.cloud.firestore_v1 import SERVER_TIMESTAMP - from google.cloud.firestore_v1._helpers import pbs_for_set_no_merge + from google.cloud.firestore_v1._helpers import pbs_for_create # Create a minimal fake GAPIC add attach it to a real client. firestore_api = mock.Mock(spec=["create_document", "commit"]) @@ -214,42 +213,32 @@ def test_add_auto_assigned(self): # Actually make a collection. collection = self._make_one("grand-parent", "parent", "child", client=client) - # Add a dummy response for the fake GAPIC. 
- parent_path = collection.parent._document_path - auto_assigned_id = "cheezburger" - name = "{}/{}/{}".format(parent_path, collection.id, auto_assigned_id) - create_doc_response = document_pb2.Document(name=name) - create_doc_response.update_time.FromDatetime(datetime.datetime.utcnow()) - firestore_api.create_document.return_value = create_doc_response - # Actually call add() on our collection; include a transform to make # sure transforms during adds work. document_data = {"been": "here", "now": SERVER_TIMESTAMP} - update_time, document_ref = collection.add(document_data) + + patch = mock.patch("google.cloud.firestore_v1.collection._auto_id") + random_doc_id = "DEADBEEF" + with patch as patched: + patched.return_value = random_doc_id + update_time, document_ref = collection.add(document_data) # Verify the response and the mocks. self.assertIs(update_time, mock.sentinel.update_time) self.assertIsInstance(document_ref, DocumentReference) self.assertIs(document_ref._client, client) - expected_path = collection._path + (auto_assigned_id,) + expected_path = collection._path + (random_doc_id,) self.assertEqual(document_ref._path, expected_path) - expected_document_pb = document_pb2.Document() - firestore_api.create_document.assert_called_once_with( - parent_path, - collection_id=collection.id, - document_id=None, - document=expected_document_pb, - mask=None, - metadata=client._rpc_metadata, - ) - write_pbs = pbs_for_set_no_merge(document_ref._document_path, document_data) + write_pbs = pbs_for_create(document_ref._document_path, document_data) firestore_api.commit.assert_called_once_with( client._database_string, write_pbs, transaction=None, metadata=client._rpc_metadata, ) + # Since we generate the ID locally, we don't call 'create_document'. 
+ firestore_api.create_document.assert_not_called() @staticmethod def _write_pb_for_create(document_path, document_data): diff --git a/firestore/tests/unit/v1/test_query.py b/firestore/tests/unit/v1/test_query.py index a4911fecb44f..bdb0e922d00b 100644 --- a/firestore/tests/unit/v1/test_query.py +++ b/firestore/tests/unit/v1/test_query.py @@ -1464,18 +1464,47 @@ def _call_fut(op_string): return _enum_from_op_string(op_string) - def test_success(self): + @staticmethod + def _get_op_class(): from google.cloud.firestore_v1.gapic import enums - op_class = enums.StructuredQuery.FieldFilter.Operator + return enums.StructuredQuery.FieldFilter.Operator + + def test_lt(self): + op_class = self._get_op_class() self.assertEqual(self._call_fut("<"), op_class.LESS_THAN) + + def test_le(self): + op_class = self._get_op_class() self.assertEqual(self._call_fut("<="), op_class.LESS_THAN_OR_EQUAL) + + def test_eq(self): + op_class = self._get_op_class() self.assertEqual(self._call_fut("=="), op_class.EQUAL) + + def test_ge(self): + op_class = self._get_op_class() self.assertEqual(self._call_fut(">="), op_class.GREATER_THAN_OR_EQUAL) + + def test_gt(self): + op_class = self._get_op_class() self.assertEqual(self._call_fut(">"), op_class.GREATER_THAN) + + def test_array_contains(self): + op_class = self._get_op_class() self.assertEqual(self._call_fut("array_contains"), op_class.ARRAY_CONTAINS) - def test_failure(self): + def test_in(self): + op_class = self._get_op_class() + self.assertEqual(self._call_fut("in"), op_class.IN) + + def test_array_contains_any(self): + op_class = self._get_op_class() + self.assertEqual( + self._call_fut("array_contains_any"), op_class.ARRAY_CONTAINS_ANY + ) + + def test_invalid(self): with self.assertRaises(ValueError): self._call_fut("?") diff --git a/grafeas/CHANGELOG.md b/grafeas/CHANGELOG.md index c9f55c42c36d..b66a945bfffe 100644 --- a/grafeas/CHANGELOG.md +++ b/grafeas/CHANGELOG.md @@ -4,6 +4,23 @@ [1]: https://pypi.org/project/grafeas/#history +## 
0.3.0 + +10-10-2019 11:28 PDT + + +### Implementation Changes +- Remove send / receive message size limit (via synth). ([#8981](https://github.com/googleapis/google-cloud-python/pull/8981)) + +### Dependencies +- Bump minimum version for google-api-core to 1.14.0. ([#8709](https://github.com/googleapis/google-cloud-python/pull/8709)) + +### Documentation +- Fix intersphinx reference to requests. ([#9294](https://github.com/googleapis/google-cloud-python/pull/9294)) +- Remove CI for gh-pages, use googleapis.dev for `api_core` refs. ([#9085](https://github.com/googleapis/google-cloud-python/pull/9085)) +- Update intersphinx mapping for requests. ([#8805](https://github.com/googleapis/google-cloud-python/pull/8805)) +- Link to googleapis.dev documentation in READMEs. ([#8705](https://github.com/googleapis/google-cloud-python/pull/8705)) + ## 0.2.0 07-12-2019 17:04 PDT diff --git a/grafeas/docs/conf.py b/grafeas/docs/conf.py index 463061ed3e4a..cf600a20ac58 100644 --- a/grafeas/docs/conf.py +++ b/grafeas/docs/conf.py @@ -330,7 +330,7 @@ "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://2.python-requests.org/en/master/", None), + "requests": ("https://requests.kennethreitz.org/en/stable/", None), "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } diff --git a/grafeas/grafeas/grafeas.py b/grafeas/grafeas/grafeas.py index 768aa8c77f29..35dae0565d59 100644 --- a/grafeas/grafeas/grafeas.py +++ b/grafeas/grafeas/grafeas.py @@ -22,4 +22,8 @@ from grafeas.grafeas_v1 import types -__all__ = ("enums", "types", "GrafeasClient") +__all__ = ( + "enums", + "types", + "GrafeasClient", +) diff --git a/grafeas/grafeas/grafeas_v1/__init__.py b/grafeas/grafeas/grafeas_v1/__init__.py index 9bbb0db16767..24d3a43d8000 100644 --- 
a/grafeas/grafeas/grafeas_v1/__init__.py +++ b/grafeas/grafeas/grafeas_v1/__init__.py @@ -27,4 +27,8 @@ class GrafeasClient(grafeas_client.GrafeasClient): enums = enums -__all__ = ("enums", "types", "GrafeasClient") +__all__ = ( + "enums", + "types", + "GrafeasClient", +) diff --git a/grafeas/grafeas/grafeas_v1/gapic/grafeas_client.py b/grafeas/grafeas/grafeas_v1/gapic/grafeas_client.py index a7e3c6d31713..544632304ea7 100644 --- a/grafeas/grafeas/grafeas_v1/gapic/grafeas_client.py +++ b/grafeas/grafeas/grafeas_v1/gapic/grafeas_client.py @@ -39,7 +39,7 @@ from grafeas.grafeas_v1.proto import grafeas_pb2_grpc -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("grafeas").version +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("grafeas",).version class GrafeasClient(object): @@ -69,7 +69,7 @@ class GrafeasClient(object): def note_path(cls, project, note): """Return a fully-qualified note string.""" return google.api_core.path_template.expand( - "projects/{project}/notes/{note}", project=project, note=note + "projects/{project}/notes/{note}", project=project, note=note, ) @classmethod @@ -85,7 +85,7 @@ def occurrence_path(cls, project, occurrence): def project_path(cls, project): """Return a fully-qualified project string.""" return google.api_core.path_template.expand( - "projects/{project}", project=project + "projects/{project}", project=project, ) def __init__(self, transport, client_config=None, client_info=None): @@ -125,7 +125,7 @@ def __init__(self, transport, client_config=None, client_info=None): if client_info is None: client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION + gapic_version=_GAPIC_LIBRARY_VERSION, ) else: client_info.gapic_version = _GAPIC_LIBRARY_VERSION @@ -136,7 +136,7 @@ def __init__(self, transport, client_config=None, client_info=None): # (Ordinarily, these are the defaults specified in the `*_config.py` # file next to this one.) 
self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME] + client_config["interfaces"][self._INTERFACE_NAME], ) # Save a dictionary of cached API call functions. @@ -202,7 +202,7 @@ def get_occurrence( client_info=self._client_info, ) - request = grafeas_pb2.GetOccurrenceRequest(name=name) + request = grafeas_pb2.GetOccurrenceRequest(name=name,) if metadata is None: metadata = [] metadata = list(metadata) @@ -300,7 +300,7 @@ def list_occurrences( ) request = grafeas_pb2.ListOccurrencesRequest( - parent=parent, filter=filter_, page_size=page_size + parent=parent, filter=filter_, page_size=page_size, ) if metadata is None: metadata = [] @@ -385,7 +385,7 @@ def delete_occurrence( client_info=self._client_info, ) - request = grafeas_pb2.DeleteOccurrenceRequest(name=name) + request = grafeas_pb2.DeleteOccurrenceRequest(name=name,) if metadata is None: metadata = [] metadata = list(metadata) @@ -468,7 +468,7 @@ def create_occurrence( ) request = grafeas_pb2.CreateOccurrenceRequest( - parent=parent, occurrence=occurrence + parent=parent, occurrence=occurrence, ) if metadata is None: metadata = [] @@ -552,7 +552,7 @@ def batch_create_occurrences( ) request = grafeas_pb2.BatchCreateOccurrencesRequest( - parent=parent, occurrences=occurrences + parent=parent, occurrences=occurrences, ) if metadata is None: metadata = [] @@ -641,7 +641,7 @@ def update_occurrence( ) request = grafeas_pb2.UpdateOccurrenceRequest( - name=name, occurrence=occurrence, update_mask=update_mask + name=name, occurrence=occurrence, update_mask=update_mask, ) if metadata is None: metadata = [] @@ -717,7 +717,7 @@ def get_occurrence_note( client_info=self._client_info, ) - request = grafeas_pb2.GetOccurrenceNoteRequest(name=name) + request = grafeas_pb2.GetOccurrenceNoteRequest(name=name,) if metadata is None: metadata = [] metadata = list(metadata) @@ -791,7 +791,7 @@ def get_note( client_info=self._client_info, ) - request = 
grafeas_pb2.GetNoteRequest(name=name) + request = grafeas_pb2.GetNoteRequest(name=name,) if metadata is None: metadata = [] metadata = list(metadata) @@ -889,7 +889,7 @@ def list_notes( ) request = grafeas_pb2.ListNotesRequest( - parent=parent, filter=filter_, page_size=page_size + parent=parent, filter=filter_, page_size=page_size, ) if metadata is None: metadata = [] @@ -972,7 +972,7 @@ def delete_note( client_info=self._client_info, ) - request = grafeas_pb2.DeleteNoteRequest(name=name) + request = grafeas_pb2.DeleteNoteRequest(name=name,) if metadata is None: metadata = [] metadata = list(metadata) @@ -1060,7 +1060,7 @@ def create_note( ) request = grafeas_pb2.CreateNoteRequest( - parent=parent, note_id=note_id, note=note + parent=parent, note_id=note_id, note=note, ) if metadata is None: metadata = [] @@ -1143,7 +1143,7 @@ def batch_create_notes( client_info=self._client_info, ) - request = grafeas_pb2.BatchCreateNotesRequest(parent=parent, notes=notes) + request = grafeas_pb2.BatchCreateNotesRequest(parent=parent, notes=notes,) if metadata is None: metadata = [] metadata = list(metadata) @@ -1231,7 +1231,7 @@ def update_note( ) request = grafeas_pb2.UpdateNoteRequest( - name=name, note=note, update_mask=update_mask + name=name, note=note, update_mask=update_mask, ) if metadata is None: metadata = [] @@ -1332,7 +1332,7 @@ def list_note_occurrences( ) request = grafeas_pb2.ListNoteOccurrencesRequest( - name=name, filter=filter_, page_size=page_size + name=name, filter=filter_, page_size=page_size, ) if metadata is None: metadata = [] diff --git a/grafeas/grafeas/grafeas_v1/gapic/transports/grafeas_grpc_transport.py b/grafeas/grafeas/grafeas_v1/gapic/transports/grafeas_grpc_transport.py index 2e438b464d3d..b7769a71eced 100644 --- a/grafeas/grafeas/grafeas_v1/gapic/transports/grafeas_grpc_transport.py +++ b/grafeas/grafeas/grafeas_v1/gapic/transports/grafeas_grpc_transport.py @@ -49,7 +49,7 @@ def __init__(self, address, scopes, channel=None, credentials=None): # 
exception (channels come with credentials baked in already). if channel is not None and credentials is not None: raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive." + "The `channel` and `credentials` arguments are mutually " "exclusive.", ) # Create the channel. @@ -68,7 +68,9 @@ def __init__(self, address, scopes, channel=None, credentials=None): # gRPC uses objects called "stubs" that are bound to the # channel and provide a basic method for each RPC. - self._stubs = {"grafeas_stub": grafeas_pb2_grpc.GrafeasStub(channel)} + self._stubs = { + "grafeas_stub": grafeas_pb2_grpc.GrafeasStub(channel), + } @classmethod def create_channel(cls, address, scopes, credentials=None, **kwargs): diff --git a/grafeas/grafeas/grafeas_v1/proto/attestation_pb2.py b/grafeas/grafeas/grafeas_v1/proto/attestation_pb2.py index 356e70bd170a..41f8ca1e2b13 100644 --- a/grafeas/grafeas/grafeas_v1/proto/attestation_pb2.py +++ b/grafeas/grafeas/grafeas_v1/proto/attestation_pb2.py @@ -28,7 +28,7 @@ serialized_pb=_b( '\n"grafeas_v1/proto/attestation.proto\x12\ngrafeas.v1\x1a\x1dgrafeas_v1/proto/common.proto"f\n\x0f\x41ttestationNote\x12.\n\x04hint\x18\x01 \x01(\x0b\x32 .grafeas.v1.AttestationNote.Hint\x1a#\n\x04Hint\x12\x1b\n\x13human_readable_name\x18\x01 \x01(\t"^\n\x15\x41ttestationOccurrence\x12\x1a\n\x12serialized_payload\x18\x01 \x01(\x0c\x12)\n\nsignatures\x18\x02 \x03(\x0b\x32\x15.grafeas.v1.SignatureBQ\n\rio.grafeas.v1P\x01Z8google.golang.org/genproto/googleapis/grafeas/v1;grafeas\xa2\x02\x03GRAb\x06proto3' ), - dependencies=[grafeas__v1_dot_proto_dot_common__pb2.DESCRIPTOR], + dependencies=[grafeas__v1_dot_proto_dot_common__pb2.DESCRIPTOR,], ) @@ -56,7 +56,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -94,10 +94,10 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], - nested_types=[_ATTESTATIONNOTE_HINT], + 
nested_types=[_ATTESTATIONNOTE_HINT,], enum_types=[], serialized_options=None, is_extendable=False, diff --git a/grafeas/grafeas/grafeas_v1/proto/build_pb2.py b/grafeas/grafeas/grafeas_v1/proto/build_pb2.py index 09be6011a42a..affcce26b1f3 100644 --- a/grafeas/grafeas/grafeas_v1/proto/build_pb2.py +++ b/grafeas/grafeas/grafeas_v1/proto/build_pb2.py @@ -30,7 +30,7 @@ serialized_pb=_b( '\n\x1cgrafeas_v1/proto/build.proto\x12\ngrafeas.v1\x1a!grafeas_v1/proto/provenance.proto"$\n\tBuildNote\x12\x17\n\x0f\x62uilder_version\x18\x01 \x01(\t"\\\n\x0f\x42uildOccurrence\x12/\n\nprovenance\x18\x01 \x01(\x0b\x32\x1b.grafeas.v1.BuildProvenance\x12\x18\n\x10provenance_bytes\x18\x02 \x01(\tBQ\n\rio.grafeas.v1P\x01Z8google.golang.org/genproto/googleapis/grafeas/v1;grafeas\xa2\x02\x03GRAb\x06proto3' ), - dependencies=[grafeas__v1_dot_proto_dot_provenance__pb2.DESCRIPTOR], + dependencies=[grafeas__v1_dot_proto_dot_provenance__pb2.DESCRIPTOR,], ) @@ -58,7 +58,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], diff --git a/grafeas/grafeas/grafeas_v1/proto/deployment_pb2.py b/grafeas/grafeas/grafeas_v1/proto/deployment_pb2.py index 4f410ee35d56..b5fea556d0ac 100644 --- a/grafeas/grafeas/grafeas_v1/proto/deployment_pb2.py +++ b/grafeas/grafeas/grafeas_v1/proto/deployment_pb2.py @@ -28,7 +28,7 @@ serialized_pb=_b( '\n!grafeas_v1/proto/deployment.proto\x12\ngrafeas.v1\x1a\x1fgoogle/protobuf/timestamp.proto"&\n\x0e\x44\x65ploymentNote\x12\x14\n\x0cresource_uri\x18\x01 \x03(\t"\xc7\x02\n\x14\x44\x65ploymentOccurrence\x12\x12\n\nuser_email\x18\x01 \x01(\t\x12/\n\x0b\x64\x65ploy_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x31\n\rundeploy_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0e\n\x06\x63onfig\x18\x04 \x01(\t\x12\x0f\n\x07\x61\x64\x64ress\x18\x05 \x01(\t\x12\x14\n\x0cresource_uri\x18\x06 \x03(\t\x12;\n\x08platform\x18\x07 
\x01(\x0e\x32).grafeas.v1.DeploymentOccurrence.Platform"C\n\x08Platform\x12\x18\n\x14PLATFORM_UNSPECIFIED\x10\x00\x12\x07\n\x03GKE\x10\x01\x12\x08\n\x04\x46LEX\x10\x02\x12\n\n\x06\x43USTOM\x10\x03\x42Q\n\rio.grafeas.v1P\x01Z8google.golang.org/genproto/googleapis/grafeas/v1;grafeas\xa2\x02\x03GRAb\x06proto3' ), - dependencies=[google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR], + dependencies=[google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,], ) @@ -87,7 +87,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -238,7 +238,7 @@ ], extensions=[], nested_types=[], - enum_types=[_DEPLOYMENTOCCURRENCE_PLATFORM], + enum_types=[_DEPLOYMENTOCCURRENCE_PLATFORM,], serialized_options=None, is_extendable=False, syntax="proto3", diff --git a/grafeas/grafeas/grafeas_v1/proto/discovery_pb2.py b/grafeas/grafeas/grafeas_v1/proto/discovery_pb2.py index e8776e44b0a7..216f8db6ba7e 100644 --- a/grafeas/grafeas/grafeas_v1/proto/discovery_pb2.py +++ b/grafeas/grafeas/grafeas_v1/proto/discovery_pb2.py @@ -136,7 +136,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], diff --git a/grafeas/grafeas/grafeas_v1/proto/grafeas_pb2.py b/grafeas/grafeas/grafeas_v1/proto/grafeas_pb2.py index 89af50706aa9..babb2045386f 100644 --- a/grafeas/grafeas/grafeas_v1/proto/grafeas_pb2.py +++ b/grafeas/grafeas/grafeas_v1/proto/grafeas_pb2.py @@ -344,7 +344,7 @@ index=0, containing_type=None, fields=[], - ) + ), ], serialized_start=474, serialized_end=1079, @@ -661,7 +661,7 @@ index=0, containing_type=None, fields=[], - ) + ), ], serialized_start=1082, serialized_end=1753, @@ -692,7 +692,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -881,7 +881,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -1052,7 +1052,7 @@ extension_scope=None, 
serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -1091,7 +1091,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -1280,7 +1280,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -1696,7 +1696,7 @@ ), ], extensions=[], - nested_types=[_BATCHCREATENOTESREQUEST_NOTESENTRY], + nested_types=[_BATCHCREATENOTESREQUEST_NOTESENTRY,], enum_types=[], serialized_options=None, is_extendable=False, @@ -1732,7 +1732,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -1828,7 +1828,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], diff --git a/grafeas/grafeas/grafeas_v1/proto/package_pb2.py b/grafeas/grafeas/grafeas_v1/proto/package_pb2.py index dbcb619010c9..519f2aa60187 100644 --- a/grafeas/grafeas/grafeas_v1/proto/package_pb2.py +++ b/grafeas/grafeas/grafeas_v1/proto/package_pb2.py @@ -510,7 +510,7 @@ ], extensions=[], nested_types=[], - enum_types=[_VERSION_VERSIONKIND], + enum_types=[_VERSION_VERSIONKIND,], serialized_options=None, is_extendable=False, syntax="proto3", diff --git a/grafeas/grafeas/grafeas_v1/proto/provenance_pb2.py b/grafeas/grafeas/grafeas_v1/proto/provenance_pb2.py index 412c42c8db2f..3f25bbc15af8 100644 --- a/grafeas/grafeas/grafeas_v1/proto/provenance_pb2.py +++ b/grafeas/grafeas/grafeas_v1/proto/provenance_pb2.py @@ -28,7 +28,7 @@ serialized_pb=_b( '\n!grafeas_v1/proto/provenance.proto\x12\ngrafeas.v1\x1a\x1fgoogle/protobuf/timestamp.proto"\x90\x04\n\x0f\x42uildProvenance\x12\n\n\x02id\x18\x01 \x01(\t\x12\x12\n\nproject_id\x18\x02 \x01(\t\x12%\n\x08\x63ommands\x18\x03 \x03(\x0b\x32\x13.grafeas.v1.Command\x12-\n\x0f\x62uilt_artifacts\x18\x04 \x03(\x0b\x32\x14.grafeas.v1.Artifact\x12/\n\x0b\x63reate_time\x18\x05 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12.\n\nstart_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0f\n\x07\x63reator\x18\x08 \x01(\t\x12\x10\n\x08logs_uri\x18\t \x01(\t\x12-\n\x11source_provenance\x18\n \x01(\x0b\x32\x12.grafeas.v1.Source\x12\x12\n\ntrigger_id\x18\x0b \x01(\t\x12\x44\n\rbuild_options\x18\x0c \x03(\x0b\x32-.grafeas.v1.BuildProvenance.BuildOptionsEntry\x12\x17\n\x0f\x62uilder_version\x18\r \x01(\t\x1a\x33\n\x11\x42uildOptionsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x95\x02\n\x06Source\x12#\n\x1b\x61rtifact_storage_source_uri\x18\x01 \x01(\t\x12\x37\n\x0b\x66ile_hashes\x18\x02 \x03(\x0b\x32".grafeas.v1.Source.FileHashesEntry\x12*\n\x07\x63ontext\x18\x03 \x01(\x0b\x32\x19.grafeas.v1.SourceContext\x12\x36\n\x13\x61\x64\x64itional_contexts\x18\x04 \x03(\x0b\x32\x19.grafeas.v1.SourceContext\x1aI\n\x0f\x46ileHashesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12%\n\x05value\x18\x02 \x01(\x0b\x32\x16.grafeas.v1.FileHashes:\x02\x38\x01"1\n\nFileHashes\x12#\n\tfile_hash\x18\x01 \x03(\x0b\x32\x10.grafeas.v1.Hash"#\n\x04Hash\x12\x0c\n\x04type\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x0c"]\n\x07\x43ommand\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0b\n\x03\x65nv\x18\x02 \x03(\t\x12\x0c\n\x04\x61rgs\x18\x03 \x03(\t\x12\x0b\n\x03\x64ir\x18\x04 \x01(\t\x12\n\n\x02id\x18\x05 \x01(\t\x12\x10\n\x08wait_for\x18\x06 \x03(\t"7\n\x08\x41rtifact\x12\x10\n\x08\x63hecksum\x18\x01 \x01(\t\x12\n\n\x02id\x18\x02 \x01(\t\x12\r\n\x05names\x18\x03 \x03(\t"\x9a\x02\n\rSourceContext\x12\x38\n\ncloud_repo\x18\x01 \x01(\x0b\x32".grafeas.v1.CloudRepoSourceContextH\x00\x12\x31\n\x06gerrit\x18\x02 \x01(\x0b\x32\x1f.grafeas.v1.GerritSourceContextH\x00\x12+\n\x03git\x18\x03 \x01(\x0b\x32\x1c.grafeas.v1.GitSourceContextH\x00\x12\x35\n\x06labels\x18\x04 \x03(\x0b\x32%.grafeas.v1.SourceContext.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 
\x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07\x63ontext"\x8a\x01\n\x0c\x41liasContext\x12+\n\x04kind\x18\x01 \x01(\x0e\x32\x1d.grafeas.v1.AliasContext.Kind\x12\x0c\n\x04name\x18\x02 \x01(\t"?\n\x04Kind\x12\x14\n\x10KIND_UNSPECIFIED\x10\x00\x12\t\n\x05\x46IXED\x10\x01\x12\x0b\n\x07MOVABLE\x10\x02\x12\t\n\x05OTHER\x10\x04"\x93\x01\n\x16\x43loudRepoSourceContext\x12#\n\x07repo_id\x18\x01 \x01(\x0b\x32\x12.grafeas.v1.RepoId\x12\x15\n\x0brevision_id\x18\x02 \x01(\tH\x00\x12\x31\n\ralias_context\x18\x03 \x01(\x0b\x32\x18.grafeas.v1.AliasContextH\x00\x42\n\n\x08revision"\x95\x01\n\x13GerritSourceContext\x12\x10\n\x08host_uri\x18\x01 \x01(\t\x12\x16\n\x0egerrit_project\x18\x02 \x01(\t\x12\x15\n\x0brevision_id\x18\x03 \x01(\tH\x00\x12\x31\n\ralias_context\x18\x04 \x01(\x0b\x32\x18.grafeas.v1.AliasContextH\x00\x42\n\n\x08revision"4\n\x10GitSourceContext\x12\x0b\n\x03url\x18\x01 \x01(\t\x12\x13\n\x0brevision_id\x18\x02 \x01(\t"S\n\x06RepoId\x12\x34\n\x0fproject_repo_id\x18\x01 \x01(\x0b\x32\x19.grafeas.v1.ProjectRepoIdH\x00\x12\r\n\x03uid\x18\x02 \x01(\tH\x00\x42\x04\n\x02id"6\n\rProjectRepoId\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x11\n\trepo_name\x18\x02 \x01(\tBQ\n\rio.grafeas.v1P\x01Z8google.golang.org/genproto/googleapis/grafeas/v1;grafeas\xa2\x02\x03GRAb\x06proto3' ), - dependencies=[google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR], + dependencies=[google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,], ) @@ -362,7 +362,7 @@ ), ], extensions=[], - nested_types=[_BUILDPROVENANCE_BUILDOPTIONSENTRY], + nested_types=[_BUILDPROVENANCE_BUILDOPTIONSENTRY,], enum_types=[], serialized_options=None, is_extendable=False, @@ -511,7 +511,7 @@ ), ], extensions=[], - nested_types=[_SOURCE_FILEHASHESENTRY], + nested_types=[_SOURCE_FILEHASHESENTRY,], enum_types=[], serialized_options=None, is_extendable=False, @@ -547,7 +547,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -960,7 +960,7 @@ ), ], 
extensions=[], - nested_types=[_SOURCECONTEXT_LABELSENTRY], + nested_types=[_SOURCECONTEXT_LABELSENTRY,], enum_types=[], serialized_options=None, is_extendable=False, @@ -973,7 +973,7 @@ index=0, containing_type=None, fields=[], - ) + ), ], serialized_start=1134, serialized_end=1416, @@ -1026,7 +1026,7 @@ ], extensions=[], nested_types=[], - enum_types=[_ALIASCONTEXT_KIND], + enum_types=[_ALIASCONTEXT_KIND,], serialized_options=None, is_extendable=False, syntax="proto3", @@ -1113,7 +1113,7 @@ index=0, containing_type=None, fields=[], - ) + ), ], serialized_start=1560, serialized_end=1707, @@ -1214,7 +1214,7 @@ index=0, containing_type=None, fields=[], - ) + ), ], serialized_start=1710, serialized_end=1859, @@ -1336,7 +1336,7 @@ index=0, containing_type=None, fields=[], - ) + ), ], serialized_start=1915, serialized_end=1998, diff --git a/grafeas/grafeas/grafeas_v1/proto/vulnerability_pb2.py b/grafeas/grafeas/grafeas_v1/proto/vulnerability_pb2.py index 612904b4a998..ad905b2af64e 100644 --- a/grafeas/grafeas/grafeas_v1/proto/vulnerability_pb2.py +++ b/grafeas/grafeas/grafeas_v1/proto/vulnerability_pb2.py @@ -440,7 +440,7 @@ ), ], extensions=[], - nested_types=[_VULNERABILITYNOTE_WINDOWSDETAIL_KNOWLEDGEBASE], + nested_types=[_VULNERABILITYNOTE_WINDOWSDETAIL_KNOWLEDGEBASE,], enum_types=[], serialized_options=None, is_extendable=False, @@ -550,7 +550,7 @@ ), ], extensions=[], - nested_types=[_VULNERABILITYNOTE_DETAIL, _VULNERABILITYNOTE_WINDOWSDETAIL], + nested_types=[_VULNERABILITYNOTE_DETAIL, _VULNERABILITYNOTE_WINDOWSDETAIL,], enum_types=[], serialized_options=None, is_extendable=False, @@ -879,7 +879,7 @@ ), ], extensions=[], - nested_types=[_VULNERABILITYOCCURRENCE_PACKAGEISSUE], + nested_types=[_VULNERABILITYOCCURRENCE_PACKAGEISSUE,], enum_types=[], serialized_options=None, is_extendable=False, diff --git a/grafeas/grafeas/grafeas_v1/types.py b/grafeas/grafeas/grafeas_v1/types.py index 253b6cadc65f..c6bbfcd9f902 100644 --- a/grafeas/grafeas/grafeas_v1/types.py +++ 
b/grafeas/grafeas/grafeas_v1/types.py @@ -38,7 +38,13 @@ from grafeas.grafeas_v1.proto import vulnerability_pb2 -_shared_modules = [any_pb2, empty_pb2, field_mask_pb2, timestamp_pb2, status_pb2] +_shared_modules = [ + any_pb2, + empty_pb2, + field_mask_pb2, + timestamp_pb2, + status_pb2, +] _local_modules = [ attestation_pb2, diff --git a/grafeas/setup.py b/grafeas/setup.py index 62fbae2dc527..4c351d7a7101 100644 --- a/grafeas/setup.py +++ b/grafeas/setup.py @@ -21,7 +21,7 @@ name = "grafeas" description = "Grafeas API client library" -version = "0.2.0" +version = "0.3.0" release_status = "Development Status :: 3 - Alpha" dependencies = [ "google-api-core[grpc] >= 1.14.0, < 2.0.0dev", diff --git a/grafeas/synth.metadata b/grafeas/synth.metadata index 0c0938c03e84..29ee97018821 100644 --- a/grafeas/synth.metadata +++ b/grafeas/synth.metadata @@ -1,25 +1,26 @@ { - "updateTime": "2019-08-06T18:36:30.465284Z", + "updateTime": "2019-10-29T12:26:28.238846Z", "sources": [ { "generator": { "name": "artman", - "version": "0.32.1", - "dockerImage": "googleapis/artman@sha256:a684d40ba9a4e15946f5f2ca6b4bd9fe301192f522e9de4fff622118775f309b" + "version": "0.40.3", + "dockerImage": "googleapis/artman@sha256:c805f50525f5f557886c94ab76f56eaa09cb1da58c3ee95111fd34259376621a" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "53e641721f965a485af64331cfea9e5522294d78" + "sha": "532773acbed8d09451dafb3d403ab1823e6a6e1e", + "internalRef": "277177415" } }, { "template": { "name": "python_library", "origin": "synthtool.gcp", - "version": "2019.5.2" + "version": "2019.10.17" } } ], diff --git a/iam/docs/conf.py b/iam/docs/conf.py index 475dc68424c1..0c61d6ee8eba 100644 --- a/iam/docs/conf.py +++ b/iam/docs/conf.py @@ -338,7 +338,7 @@ "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), - 
"requests": ("https://2.python-requests.org/en/master/", None), + "requests": ("https://requests.kennethreitz.org/en/master/", None), "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } diff --git a/iam/synth.metadata b/iam/synth.metadata index d7a2c8d24679..d8daa260fb6a 100644 --- a/iam/synth.metadata +++ b/iam/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-08-03T12:12:16.255697Z", + "updateTime": "2019-10-05T12:25:22.123031Z", "sources": [ { "generator": { "name": "artman", - "version": "0.32.1", - "dockerImage": "googleapis/artman@sha256:a684d40ba9a4e15946f5f2ca6b4bd9fe301192f522e9de4fff622118775f309b" + "version": "0.38.0", + "dockerImage": "googleapis/artman@sha256:0d2f8d429110aeb8d82df6550ef4ede59d40df9062d260a1580fce688b0512bf" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "7b212a8d2319cd81a7b6942c25dbf4550480a06c", - "internalRef": "261339454" + "sha": "ceb8e2fb12f048cc94caae532ef0b4cf026a78f3", + "internalRef": "272971705" } }, { diff --git a/iot/.repo-metadata.json b/iot/.repo-metadata.json index 0fd78d6b99d4..fb7140f7f4d2 100644 --- a/iot/.repo-metadata.json +++ b/iot/.repo-metadata.json @@ -2,7 +2,7 @@ "name": "cloudiot", "name_pretty": "Google Cloud Internet of Things (IoT) Core", "product_documentation": "https://cloud.google.com/iot", - "client_documentation": "https://googleapis.dev/python/iot/latest", + "client_documentation": "https://googleapis.dev/python/cloudiot/latest", "issue_tracker": "https://issuetracker.google.com/issues?q=status:open%20componentid:310170", "release_level": "alpha", "language": "python", diff --git a/iot/docs/conf.py b/iot/docs/conf.py index 547154d69306..44d871cfa605 100644 --- a/iot/docs/conf.py +++ b/iot/docs/conf.py @@ -338,7 +338,7 @@ "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), "google.api_core": 
("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://2.python-requests.org/en/master/", None), + "requests": ("https://requests.kennethreitz.org/en/master/", None), "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } diff --git a/iot/synth.metadata b/iot/synth.metadata index 6306a7aaffc6..f93b32daed5a 100644 --- a/iot/synth.metadata +++ b/iot/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-08-06T12:28:01.870632Z", + "updateTime": "2019-10-05T12:26:06.841344Z", "sources": [ { "generator": { "name": "artman", - "version": "0.32.1", - "dockerImage": "googleapis/artman@sha256:a684d40ba9a4e15946f5f2ca6b4bd9fe301192f522e9de4fff622118775f309b" + "version": "0.38.0", + "dockerImage": "googleapis/artman@sha256:0d2f8d429110aeb8d82df6550ef4ede59d40df9062d260a1580fce688b0512bf" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "e699b0cba64ffddfae39633417180f1f65875896", - "internalRef": "261759677" + "sha": "ceb8e2fb12f048cc94caae532ef0b4cf026a78f3", + "internalRef": "272971705" } }, { diff --git a/irm/docs/conf.py b/irm/docs/conf.py index 9c014936073a..f479af751a51 100644 --- a/irm/docs/conf.py +++ b/irm/docs/conf.py @@ -338,7 +338,7 @@ "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://2.python-requests.org/en/master/", None), + "requests": ("https://requests.kennethreitz.org/en/master/", None), "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } diff --git a/irm/synth.metadata b/irm/synth.metadata index 4650d21844b1..d6539a1bb446 100644 --- a/irm/synth.metadata +++ b/irm/synth.metadata @@ -1,19 
+1,19 @@ { - "updateTime": "2019-08-06T12:28:51.028503Z", + "updateTime": "2019-10-05T12:26:52.725480Z", "sources": [ { "generator": { "name": "artman", - "version": "0.32.1", - "dockerImage": "googleapis/artman@sha256:a684d40ba9a4e15946f5f2ca6b4bd9fe301192f522e9de4fff622118775f309b" + "version": "0.38.0", + "dockerImage": "googleapis/artman@sha256:0d2f8d429110aeb8d82df6550ef4ede59d40df9062d260a1580fce688b0512bf" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "e699b0cba64ffddfae39633417180f1f65875896", - "internalRef": "261759677" + "sha": "ceb8e2fb12f048cc94caae532ef0b4cf026a78f3", + "internalRef": "272971705" } }, { diff --git a/kms/docs/conf.py b/kms/docs/conf.py index 934d87ed45a3..a2c76096b6b5 100644 --- a/kms/docs/conf.py +++ b/kms/docs/conf.py @@ -332,7 +332,7 @@ "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://2.python-requests.org/en/master/", None), + "requests": ("https://requests.kennethreitz.org/en/stable/", None), "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } diff --git a/language/docs/conf.py b/language/docs/conf.py index 198fdcc57c37..999e8f0e5241 100644 --- a/language/docs/conf.py +++ b/language/docs/conf.py @@ -339,7 +339,7 @@ "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://2.python-requests.org/en/master/", None), + "requests": ("https://requests.kennethreitz.org/en/stable/", None), "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } diff --git 
a/language/google/cloud/language_v1beta2/gapic/enums.py b/language/google/cloud/language_v1beta2/gapic/enums.py index aa68fa4911ec..0d3cf1c591ed 100644 --- a/language/google/cloud/language_v1beta2/gapic/enums.py +++ b/language/google/cloud/language_v1beta2/gapic/enums.py @@ -34,7 +34,7 @@ class EncodingType(enum.IntEnum): based on the UTF-8 encoding of the input. C++ and Go are examples of languages that use this encoding natively. UTF16 (int): Encoding-dependent information (such as ``begin_offset``) is calculated - based on the UTF-16 encoding of the input. Java and Javascript are + based on the UTF-16 encoding of the input. Java and JavaScript are examples of languages that use this encoding natively. UTF32 (int): Encoding-dependent information (such as ``begin_offset``) is calculated based on the UTF-32 encoding of the input. Python is an example of a @@ -242,7 +242,10 @@ class Type(enum.IntEnum): class Entity(object): class Type(enum.IntEnum): """ - The type of the entity. + The type of the entity. For most entity types, the associated metadata + is a Wikipedia URL (``wikipedia_url``) and Knowledge Graph MID + (``mid``). The table below lists the associated fields for entities that + have different metadata. 
Attributes: UNKNOWN (int): Unknown @@ -250,9 +253,49 @@ class Type(enum.IntEnum): LOCATION (int): Location ORGANIZATION (int): Organization EVENT (int): Event - WORK_OF_ART (int): Work of art - CONSUMER_GOOD (int): Consumer goods - OTHER (int): Other types + WORK_OF_ART (int): Artwork + CONSUMER_GOOD (int): Consumer product + OTHER (int): Other types of entities + PHONE_NUMBER (int): Phone number + + The metadata lists the phone number, formatted according to local + convention, plus whichever additional elements appear in the text: + + - ``number`` - the actual number, broken down into sections as per + local convention + - ``national_prefix`` - country code, if detected + - ``area_code`` - region or area code, if detected + - ``extension`` - phone extension (to be dialed after connection), if + detected + ADDRESS (int): Address + + The metadata identifies the street number and locality plus whichever + additional elements appear in the text: + + - ``street_number`` - street number + - ``locality`` - city or town + - ``street_name`` - street/route name, if detected + - ``postal_code`` - postal code, if detected + - ``country`` - country, if detected< + - ``broad_region`` - administrative area, such as the state, if + detected + - ``narrow_region`` - smaller administrative area, such as county, if + detected + - ``sublocality`` - used in Asian addresses to demark a district within + a city, if detected + DATE (int): Date + + The metadata identifies the components of the date: + + - ``year`` - four digit year, if detected + - ``month`` - two digit month number, if detected + - ``day`` - two digit day number, if detected + NUMBER (int): Number + + The metadata is the number itself. + PRICE (int): Price + + The metadata identifies the ``value`` and ``currency``. 
""" UNKNOWN = 0 @@ -263,6 +306,11 @@ class Type(enum.IntEnum): WORK_OF_ART = 5 CONSUMER_GOOD = 6 OTHER = 7 + PHONE_NUMBER = 9 + ADDRESS = 10 + DATE = 11 + NUMBER = 12 + PRICE = 13 class EntityMention(object): diff --git a/language/google/cloud/language_v1beta2/gapic/language_service_client.py b/language/google/cloud/language_v1beta2/gapic/language_service_client.py index dcb8e89d7fc6..73af0ff65ede 100644 --- a/language/google/cloud/language_v1beta2/gapic/language_service_client.py +++ b/language/google/cloud/language_v1beta2/gapic/language_service_client.py @@ -207,7 +207,7 @@ def analyze_sentiment( >>> response = client.analyze_sentiment(document) Args: - document (Union[dict, ~google.cloud.language_v1beta2.types.Document]): Input document. + document (Union[dict, ~google.cloud.language_v1beta2.types.Document]): Required. Input document. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.language_v1beta2.types.Document` @@ -274,7 +274,7 @@ def analyze_entities( >>> response = client.analyze_entities(document) Args: - document (Union[dict, ~google.cloud.language_v1beta2.types.Document]): Input document. + document (Union[dict, ~google.cloud.language_v1beta2.types.Document]): Required. Input document. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.language_v1beta2.types.Document` @@ -339,7 +339,7 @@ def analyze_entity_sentiment( >>> response = client.analyze_entity_sentiment(document) Args: - document (Union[dict, ~google.cloud.language_v1beta2.types.Document]): Input document. + document (Union[dict, ~google.cloud.language_v1beta2.types.Document]): Required. Input document. 
If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.language_v1beta2.types.Document` @@ -405,7 +405,7 @@ def analyze_syntax( >>> response = client.analyze_syntax(document) Args: - document (Union[dict, ~google.cloud.language_v1beta2.types.Document]): Input document. + document (Union[dict, ~google.cloud.language_v1beta2.types.Document]): Required. Input document. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.language_v1beta2.types.Document` @@ -468,7 +468,7 @@ def classify_text( >>> response = client.classify_text(document) Args: - document (Union[dict, ~google.cloud.language_v1beta2.types.Document]): Input document. + document (Union[dict, ~google.cloud.language_v1beta2.types.Document]): Required. Input document. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.language_v1beta2.types.Document` @@ -534,11 +534,11 @@ def annotate_text( >>> response = client.annotate_text(document, features) Args: - document (Union[dict, ~google.cloud.language_v1beta2.types.Document]): Input document. + document (Union[dict, ~google.cloud.language_v1beta2.types.Document]): Required. Input document. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.language_v1beta2.types.Document` - features (Union[dict, ~google.cloud.language_v1beta2.types.Features]): The enabled features. + features (Union[dict, ~google.cloud.language_v1beta2.types.Features]): Required. The enabled features. 
If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.language_v1beta2.types.Features` diff --git a/language/google/cloud/language_v1beta2/proto/language_service.proto b/language/google/cloud/language_v1beta2/proto/language_service.proto index 0263be04aedd..d0242e599759 100644 --- a/language/google/cloud/language_v1beta2/proto/language_service.proto +++ b/language/google/cloud/language_v1beta2/proto/language_service.proto @@ -1,4 +1,4 @@ -// Copyright 2017 Google Inc. +// Copyright 2019 Google LLC. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -11,15 +11,16 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. +// syntax = "proto3"; package google.cloud.language.v1beta2; import "google/api/annotations.proto"; -import "google/longrunning/operations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; import "google/protobuf/timestamp.proto"; -import "google/rpc/status.proto"; option go_package = "google.golang.org/genproto/googleapis/cloud/language/v1beta2;language"; option java_multiple_files = true; @@ -29,36 +30,42 @@ option java_package = "com.google.cloud.language.v1beta2"; // Provides text analysis operations such as sentiment analysis and entity // recognition. service LanguageService { + option (google.api.default_host) = "language.googleapis.com"; + option (google.api.oauth_scopes) = + "https://www.googleapis.com/auth/cloud-language," + "https://www.googleapis.com/auth/cloud-platform"; + // Analyzes the sentiment of the provided text. 
- rpc AnalyzeSentiment(AnalyzeSentimentRequest) - returns (AnalyzeSentimentResponse) { + rpc AnalyzeSentiment(AnalyzeSentimentRequest) returns (AnalyzeSentimentResponse) { option (google.api.http) = { post: "/v1beta2/documents:analyzeSentiment" body: "*" }; + option (google.api.method_signature) = "document,encoding_type"; + option (google.api.method_signature) = "document"; } // Finds named entities (currently proper names and common nouns) in the text // along with entity types, salience, mentions for each entity, and // other properties. - rpc AnalyzeEntities(AnalyzeEntitiesRequest) - returns (AnalyzeEntitiesResponse) { + rpc AnalyzeEntities(AnalyzeEntitiesRequest) returns (AnalyzeEntitiesResponse) { option (google.api.http) = { post: "/v1beta2/documents:analyzeEntities" body: "*" }; + option (google.api.method_signature) = "document,encoding_type"; + option (google.api.method_signature) = "document"; } - // Finds entities, similar to - // [AnalyzeEntities][google.cloud.language.v1beta2.LanguageService.AnalyzeEntities] - // in the text and analyzes sentiment associated with each entity and its - // mentions. - rpc AnalyzeEntitySentiment(AnalyzeEntitySentimentRequest) - returns (AnalyzeEntitySentimentResponse) { + // Finds entities, similar to [AnalyzeEntities][google.cloud.language.v1beta2.LanguageService.AnalyzeEntities] in the text and analyzes + // sentiment associated with each entity and its mentions. 
+ rpc AnalyzeEntitySentiment(AnalyzeEntitySentimentRequest) returns (AnalyzeEntitySentimentResponse) { option (google.api.http) = { post: "/v1beta2/documents:analyzeEntitySentiment" body: "*" }; + option (google.api.method_signature) = "document,encoding_type"; + option (google.api.method_signature) = "document"; } // Analyzes the syntax of the text and provides sentence boundaries and @@ -69,6 +76,8 @@ service LanguageService { post: "/v1beta2/documents:analyzeSyntax" body: "*" }; + option (google.api.method_signature) = "document,encoding_type"; + option (google.api.method_signature) = "document"; } // Classifies a document into categories. @@ -77,6 +86,7 @@ service LanguageService { post: "/v1beta2/documents:classifyText" body: "*" }; + option (google.api.method_signature) = "document"; } // A convenience method that provides all syntax, sentiment, entity, and @@ -86,6 +96,8 @@ service LanguageService { post: "/v1beta2/documents:annotateText" body: "*" }; + option (google.api.method_signature) = "document,features,encoding_type"; + option (google.api.method_signature) = "document,features"; } } @@ -113,6 +125,7 @@ message Document { // Google Cloud Storage URI. oneof source { // The content of the input in string format. + // Cloud audit logging exempt since it is based on user data. string content = 2; // The Google Cloud Storage URI where the file content is located. @@ -139,8 +152,8 @@ message Sentence { TextSpan text = 1; // For calls to [AnalyzeSentiment][] or if - // [AnnotateTextRequest.Features.extract_document_sentiment][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_document_sentiment] - // is set to true, this field will contain the sentiment for the sentence. + // [AnnotateTextRequest.Features.extract_document_sentiment][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_document_sentiment] is set to + // true, this field will contain the sentiment for the sentence. 
Sentiment sentiment = 2; } @@ -148,7 +161,10 @@ message Sentence { // a person, an organization, or location. The API associates information, such // as salience and mentions, with entities. message Entity { - // The type of the entity. + // The type of the entity. For most entity types, the associated metadata is a + // Wikipedia URL (`wikipedia_url`) and Knowledge Graph MID (`mid`). The table + // below lists the associated fields for entities that have different + // metadata. enum Type { // Unknown UNKNOWN = 0; @@ -165,14 +181,63 @@ message Entity { // Event EVENT = 4; - // Work of art + // Artwork WORK_OF_ART = 5; - // Consumer goods + // Consumer product CONSUMER_GOOD = 6; - // Other types + // Other types of entities OTHER = 7; + + // Phone number + // + // The metadata lists the phone number, formatted according to local + // convention, plus whichever additional elements appear in the text: + // + // * `number` - the actual number, broken down into sections as per local + // convention + // * `national_prefix` - country code, if detected + // * `area_code` - region or area code, if detected + // * `extension` - phone extension (to be dialed after connection), if + // detected + PHONE_NUMBER = 9; + + // Address + // + // The metadata identifies the street number and locality plus whichever + // additional elements appear in the text: + // + // * `street_number` - street number + // * `locality` - city or town + // * `street_name` - street/route name, if detected + // * `postal_code` - postal code, if detected + // * `country` - country, if detected< + // * `broad_region` - administrative area, such as the state, if detected + // * `narrow_region` - smaller administrative area, such as county, if + // detected + // * `sublocality` - used in Asian addresses to demark a district within a + // city, if detected + ADDRESS = 10; + + // Date + // + // The metadata identifies the components of the date: + // + // * `year` - four digit year, if detected + // * 
`month` - two digit month number, if detected + // * `day` - two digit day number, if detected + DATE = 11; + + // Number + // + // The metadata is the number itself. + NUMBER = 12; + + // Price + // + // The metadata identifies the `value` and `currency`. + PRICE = 13; } // The representative name for the entity. @@ -183,8 +248,9 @@ message Entity { // Metadata associated with the entity. // - // Currently, Wikipedia URLs and Knowledge Graph MIDs are provided, if - // available. The associated keys are "wikipedia_url" and "mid", respectively. + // For most entity types, the metadata is a Wikipedia URL (`wikipedia_url`) + // and Knowledge Graph MID (`mid`), if they are available. For the metadata + // associated with other entity types, see the Type table below. map metadata = 3; // The salience score associated with the entity in the [0, 1.0] range. @@ -200,12 +266,38 @@ message Entity { repeated EntityMention mentions = 5; // For calls to [AnalyzeEntitySentiment][] or if - // [AnnotateTextRequest.Features.extract_entity_sentiment][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_entity_sentiment] - // is set to true, this field will contain the aggregate sentiment expressed - // for this entity in the provided document. + // [AnnotateTextRequest.Features.extract_entity_sentiment][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_entity_sentiment] is set to + // true, this field will contain the aggregate sentiment expressed for this + // entity in the provided document. Sentiment sentiment = 6; } +// Represents the text encoding that the caller uses to process the output. +// Providing an `EncodingType` is recommended because the API provides the +// beginning offsets for various outputs, such as tokens and mentions, and +// languages that natively use different text encodings may access offsets +// differently. 
+enum EncodingType { + // If `EncodingType` is not specified, encoding-dependent information (such as + // `begin_offset`) will be set at `-1`. + NONE = 0; + + // Encoding-dependent information (such as `begin_offset`) is calculated based + // on the UTF-8 encoding of the input. C++ and Go are examples of languages + // that use this encoding natively. + UTF8 = 1; + + // Encoding-dependent information (such as `begin_offset`) is calculated based + // on the UTF-16 encoding of the input. Java and JavaScript are examples of + // languages that use this encoding natively. + UTF16 = 2; + + // Encoding-dependent information (such as `begin_offset`) is calculated based + // on the UTF-32 encoding of the input. Python is an example of a language + // that uses this encoding natively. + UTF32 = 3; +} + // Represents the smallest syntactic building block of the text. message Token { // The token text. @@ -223,6 +315,7 @@ message Token { // Represents the feeling associated with the entire text or entities in // the text. +// Next ID: 6 message Sentiment { // A non-negative number in the [0, +inf) range, which represents // the absolute magnitude of sentiment regardless of score (positive or @@ -849,9 +942,9 @@ message EntityMention { Type type = 2; // For calls to [AnalyzeEntitySentiment][] or if - // [AnnotateTextRequest.Features.extract_entity_sentiment][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_entity_sentiment] - // is set to true, this field will contain the sentiment expressed for this - // mention of the entity in the provided document. + // [AnnotateTextRequest.Features.extract_entity_sentiment][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_entity_sentiment] is set to + // true, this field will contain the sentiment expressed for this mention of + // the entity in the provided document. 
Sentiment sentiment = 3; } @@ -861,15 +954,14 @@ message TextSpan { string content = 1; // The API calculates the beginning offset of the content in the original - // document according to the - // [EncodingType][google.cloud.language.v1beta2.EncodingType] specified in the - // API request. + // document according to the [EncodingType][google.cloud.language.v1beta2.EncodingType] specified in the API request. int32 begin_offset = 2; } // Represents a category returned from the text classifier. message ClassificationCategory { - // The name of the category representing the document. + // The name of the category representing the document, from the [predefined + // taxonomy](/natural-language/docs/categories). string name = 1; // The classifier's confidence of the category. Number represents how certain @@ -879,8 +971,8 @@ message ClassificationCategory { // The sentiment analysis request message. message AnalyzeSentimentRequest { - // Input document. - Document document = 1; + // Required. Input document. + Document document = 1 [(google.api.field_behavior) = REQUIRED]; // The encoding type used by the API to calculate sentence offsets for the // sentence sentiment. @@ -894,8 +986,7 @@ message AnalyzeSentimentResponse { // The language of the text, which will be the same as the language specified // in the request or, if not specified, the automatically-detected language. - // See [Document.language][google.cloud.language.v1beta2.Document.language] - // field for more details. + // See [Document.language][google.cloud.language.v1beta2.Document.language] field for more details. string language = 2; // The sentiment for all the sentences in the document. @@ -904,8 +995,8 @@ message AnalyzeSentimentResponse { // The entity-level sentiment analysis request message. message AnalyzeEntitySentimentRequest { - // Input document. - Document document = 1; + // Required. Input document. 
+ Document document = 1 [(google.api.field_behavior) = REQUIRED]; // The encoding type used by the API to calculate offsets. EncodingType encoding_type = 2; @@ -918,15 +1009,14 @@ message AnalyzeEntitySentimentResponse { // The language of the text, which will be the same as the language specified // in the request or, if not specified, the automatically-detected language. - // See [Document.language][google.cloud.language.v1beta2.Document.language] - // field for more details. + // See [Document.language][google.cloud.language.v1beta2.Document.language] field for more details. string language = 2; } // The entity analysis request message. message AnalyzeEntitiesRequest { - // Input document. - Document document = 1; + // Required. Input document. + Document document = 1 [(google.api.field_behavior) = REQUIRED]; // The encoding type used by the API to calculate offsets. EncodingType encoding_type = 2; @@ -939,15 +1029,14 @@ message AnalyzeEntitiesResponse { // The language of the text, which will be the same as the language specified // in the request or, if not specified, the automatically-detected language. - // See [Document.language][google.cloud.language.v1beta2.Document.language] - // field for more details. + // See [Document.language][google.cloud.language.v1beta2.Document.language] field for more details. string language = 2; } // The syntax analysis request message. message AnalyzeSyntaxRequest { - // Input document. - Document document = 1; + // Required. Input document. + Document document = 1 [(google.api.field_behavior) = REQUIRED]; // The encoding type used by the API to calculate offsets. EncodingType encoding_type = 2; @@ -963,15 +1052,14 @@ message AnalyzeSyntaxResponse { // The language of the text, which will be the same as the language specified // in the request or, if not specified, the automatically-detected language. - // See [Document.language][google.cloud.language.v1beta2.Document.language] - // field for more details. 
+ // See [Document.language][google.cloud.language.v1beta2.Document.language] field for more details. string language = 3; } // The document classification request message. message ClassifyTextRequest { - // Input document. - Document document = 1; + // Required. Input document. + Document document = 1 [(google.api.field_behavior) = REQUIRED]; } // The document classification response message. @@ -985,6 +1073,7 @@ message ClassifyTextResponse { message AnnotateTextRequest { // All available features for sentiment, syntax, and semantic analysis. // Setting each one to true will enable that specific analysis for the input. + // Next ID: 10 message Features { // Extract syntax information. bool extract_syntax = 1; @@ -998,15 +1087,17 @@ message AnnotateTextRequest { // Extract entities and their associated sentiment. bool extract_entity_sentiment = 4; - // Classify the full document into categories. + // Classify the full document into categories. If this is true, + // the API will use the default model which classifies into a + // [predefined taxonomy](/natural-language/docs/categories). bool classify_text = 6; } - // Input document. - Document document = 1; + // Required. Input document. + Document document = 1 [(google.api.field_behavior) = REQUIRED]; - // The enabled features. - Features features = 2; + // Required. The enabled features. + Features features = 2 [(google.api.field_behavior) = REQUIRED]; // The encoding type used by the API to calculate offsets. EncodingType encoding_type = 3; @@ -1034,36 +1125,9 @@ message AnnotateTextResponse { // The language of the text, which will be the same as the language specified // in the request or, if not specified, the automatically-detected language. - // See [Document.language][google.cloud.language.v1beta2.Document.language] - // field for more details. + // See [Document.language][google.cloud.language.v1beta2.Document.language] field for more details. 
string language = 5; // Categories identified in the input document. repeated ClassificationCategory categories = 6; } - -// Represents the text encoding that the caller uses to process the output. -// Providing an `EncodingType` is recommended because the API provides the -// beginning offsets for various outputs, such as tokens and mentions, and -// languages that natively use different text encodings may access offsets -// differently. -enum EncodingType { - // If `EncodingType` is not specified, encoding-dependent information (such as - // `begin_offset`) will be set at `-1`. - NONE = 0; - - // Encoding-dependent information (such as `begin_offset`) is calculated based - // on the UTF-8 encoding of the input. C++ and Go are examples of languages - // that use this encoding natively. - UTF8 = 1; - - // Encoding-dependent information (such as `begin_offset`) is calculated based - // on the UTF-16 encoding of the input. Java and Javascript are examples of - // languages that use this encoding natively. - UTF16 = 2; - - // Encoding-dependent information (such as `begin_offset`) is calculated based - // on the UTF-32 encoding of the input. Python is an example of a language - // that uses this encoding natively. 
- UTF32 = 3; -} diff --git a/language/google/cloud/language_v1beta2/proto/language_service_pb2.py b/language/google/cloud/language_v1beta2/proto/language_service_pb2.py index 6e2ce20c6b5e..8c9068df2910 100644 --- a/language/google/cloud/language_v1beta2/proto/language_service_pb2.py +++ b/language/google/cloud/language_v1beta2/proto/language_service_pb2.py @@ -17,11 +17,9 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.longrunning import ( - operations_pb2 as google_dot_longrunning_dot_operations__pb2, -) +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2 DESCRIPTOR = _descriptor.FileDescriptor( @@ -32,13 +30,13 @@ "\n!com.google.cloud.language.v1beta2B\024LanguageServiceProtoP\001ZEgoogle.golang.org/genproto/googleapis/cloud/language/v1beta2;language" ), serialized_pb=_b( - '\n:google/cloud/language_v1beta2/proto/language_service.proto\x12\x1dgoogle.cloud.language.v1beta2\x1a\x1cgoogle/api/annotations.proto\x1a#google/longrunning/operations.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto"\xc8\x01\n\x08\x44ocument\x12:\n\x04type\x18\x01 \x01(\x0e\x32,.google.cloud.language.v1beta2.Document.Type\x12\x11\n\x07\x63ontent\x18\x02 \x01(\tH\x00\x12\x19\n\x0fgcs_content_uri\x18\x03 \x01(\tH\x00\x12\x10\n\x08language\x18\x04 \x01(\t"6\n\x04Type\x12\x14\n\x10TYPE_UNSPECIFIED\x10\x00\x12\x0e\n\nPLAIN_TEXT\x10\x01\x12\x08\n\x04HTML\x10\x02\x42\x08\n\x06source"~\n\x08Sentence\x12\x35\n\x04text\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.TextSpan\x12;\n\tsentiment\x18\x02 \x01(\x0b\x32(.google.cloud.language.v1beta2.Sentiment"\xd2\x03\n\x06\x45ntity\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x38\n\x04type\x18\x02 
\x01(\x0e\x32*.google.cloud.language.v1beta2.Entity.Type\x12\x45\n\x08metadata\x18\x03 \x03(\x0b\x32\x33.google.cloud.language.v1beta2.Entity.MetadataEntry\x12\x10\n\x08salience\x18\x04 \x01(\x02\x12>\n\x08mentions\x18\x05 \x03(\x0b\x32,.google.cloud.language.v1beta2.EntityMention\x12;\n\tsentiment\x18\x06 \x01(\x0b\x32(.google.cloud.language.v1beta2.Sentiment\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"y\n\x04Type\x12\x0b\n\x07UNKNOWN\x10\x00\x12\n\n\x06PERSON\x10\x01\x12\x0c\n\x08LOCATION\x10\x02\x12\x10\n\x0cORGANIZATION\x10\x03\x12\t\n\x05\x45VENT\x10\x04\x12\x0f\n\x0bWORK_OF_ART\x10\x05\x12\x11\n\rCONSUMER_GOOD\x10\x06\x12\t\n\x05OTHER\x10\x07"\xda\x01\n\x05Token\x12\x35\n\x04text\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.TextSpan\x12\x43\n\x0epart_of_speech\x18\x02 \x01(\x0b\x32+.google.cloud.language.v1beta2.PartOfSpeech\x12\x46\n\x0f\x64\x65pendency_edge\x18\x03 \x01(\x0b\x32-.google.cloud.language.v1beta2.DependencyEdge\x12\r\n\x05lemma\x18\x04 \x01(\t"-\n\tSentiment\x12\x11\n\tmagnitude\x18\x02 \x01(\x02\x12\r\n\x05score\x18\x03 \x01(\x02"\xdf\x10\n\x0cPartOfSpeech\x12<\n\x03tag\x18\x01 \x01(\x0e\x32/.google.cloud.language.v1beta2.PartOfSpeech.Tag\x12\x42\n\x06\x61spect\x18\x02 \x01(\x0e\x32\x32.google.cloud.language.v1beta2.PartOfSpeech.Aspect\x12>\n\x04\x63\x61se\x18\x03 \x01(\x0e\x32\x30.google.cloud.language.v1beta2.PartOfSpeech.Case\x12>\n\x04\x66orm\x18\x04 \x01(\x0e\x32\x30.google.cloud.language.v1beta2.PartOfSpeech.Form\x12\x42\n\x06gender\x18\x05 \x01(\x0e\x32\x32.google.cloud.language.v1beta2.PartOfSpeech.Gender\x12>\n\x04mood\x18\x06 \x01(\x0e\x32\x30.google.cloud.language.v1beta2.PartOfSpeech.Mood\x12\x42\n\x06number\x18\x07 \x01(\x0e\x32\x32.google.cloud.language.v1beta2.PartOfSpeech.Number\x12\x42\n\x06person\x18\x08 \x01(\x0e\x32\x32.google.cloud.language.v1beta2.PartOfSpeech.Person\x12\x42\n\x06proper\x18\t 
\x01(\x0e\x32\x32.google.cloud.language.v1beta2.PartOfSpeech.Proper\x12L\n\x0breciprocity\x18\n \x01(\x0e\x32\x37.google.cloud.language.v1beta2.PartOfSpeech.Reciprocity\x12@\n\x05tense\x18\x0b \x01(\x0e\x32\x31.google.cloud.language.v1beta2.PartOfSpeech.Tense\x12@\n\x05voice\x18\x0c \x01(\x0e\x32\x31.google.cloud.language.v1beta2.PartOfSpeech.Voice"\x8d\x01\n\x03Tag\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x07\n\x03\x41\x44J\x10\x01\x12\x07\n\x03\x41\x44P\x10\x02\x12\x07\n\x03\x41\x44V\x10\x03\x12\x08\n\x04\x43ONJ\x10\x04\x12\x07\n\x03\x44\x45T\x10\x05\x12\x08\n\x04NOUN\x10\x06\x12\x07\n\x03NUM\x10\x07\x12\x08\n\x04PRON\x10\x08\x12\x07\n\x03PRT\x10\t\x12\t\n\x05PUNCT\x10\n\x12\x08\n\x04VERB\x10\x0b\x12\x05\n\x01X\x10\x0c\x12\t\n\x05\x41\x46\x46IX\x10\r"O\n\x06\x41spect\x12\x12\n\x0e\x41SPECT_UNKNOWN\x10\x00\x12\x0e\n\nPERFECTIVE\x10\x01\x12\x10\n\x0cIMPERFECTIVE\x10\x02\x12\x0f\n\x0bPROGRESSIVE\x10\x03"\xf8\x01\n\x04\x43\x61se\x12\x10\n\x0c\x43\x41SE_UNKNOWN\x10\x00\x12\x0e\n\nACCUSATIVE\x10\x01\x12\r\n\tADVERBIAL\x10\x02\x12\x11\n\rCOMPLEMENTIVE\x10\x03\x12\n\n\x06\x44\x41TIVE\x10\x04\x12\x0c\n\x08GENITIVE\x10\x05\x12\x10\n\x0cINSTRUMENTAL\x10\x06\x12\x0c\n\x08LOCATIVE\x10\x07\x12\x0e\n\nNOMINATIVE\x10\x08\x12\x0b\n\x07OBLIQUE\x10\t\x12\r\n\tPARTITIVE\x10\n\x12\x11\n\rPREPOSITIONAL\x10\x0b\x12\x12\n\x0eREFLEXIVE_CASE\x10\x0c\x12\x11\n\rRELATIVE_CASE\x10\r\x12\x0c\n\x08VOCATIVE\x10\x0e"\xaf\x01\n\x04\x46orm\x12\x10\n\x0c\x46ORM_UNKNOWN\x10\x00\x12\x0c\n\x08\x41\x44NOMIAL\x10\x01\x12\r\n\tAUXILIARY\x10\x02\x12\x12\n\x0e\x43OMPLEMENTIZER\x10\x03\x12\x10\n\x0c\x46INAL_ENDING\x10\x04\x12\n\n\x06GERUND\x10\x05\x12\n\n\x06REALIS\x10\x06\x12\x0c\n\x08IRREALIS\x10\x07\x12\t\n\x05SHORT\x10\x08\x12\x08\n\x04LONG\x10\t\x12\t\n\x05ORDER\x10\n\x12\x0c\n\x08SPECIFIC\x10\x0b"E\n\x06Gender\x12\x12\n\x0eGENDER_UNKNOWN\x10\x00\x12\x0c\n\x08\x46\x45MININE\x10\x01\x12\r\n\tMASCULINE\x10\x02\x12\n\n\x06NEUTER\x10\x03"\x7f\n\x04Mood\x12\x10\n\x0cMOOD_UNKNOWN\x10\x00\x12\x14\n\x10\x43ONDITIONAL_M
OOD\x10\x01\x12\x0e\n\nIMPERATIVE\x10\x02\x12\x0e\n\nINDICATIVE\x10\x03\x12\x11\n\rINTERROGATIVE\x10\x04\x12\x0b\n\x07JUSSIVE\x10\x05\x12\x0f\n\x0bSUBJUNCTIVE\x10\x06"@\n\x06Number\x12\x12\n\x0eNUMBER_UNKNOWN\x10\x00\x12\x0c\n\x08SINGULAR\x10\x01\x12\n\n\x06PLURAL\x10\x02\x12\x08\n\x04\x44UAL\x10\x03"T\n\x06Person\x12\x12\n\x0ePERSON_UNKNOWN\x10\x00\x12\t\n\x05\x46IRST\x10\x01\x12\n\n\x06SECOND\x10\x02\x12\t\n\x05THIRD\x10\x03\x12\x14\n\x10REFLEXIVE_PERSON\x10\x04"8\n\x06Proper\x12\x12\n\x0ePROPER_UNKNOWN\x10\x00\x12\n\n\x06PROPER\x10\x01\x12\x0e\n\nNOT_PROPER\x10\x02"J\n\x0bReciprocity\x12\x17\n\x13RECIPROCITY_UNKNOWN\x10\x00\x12\x0e\n\nRECIPROCAL\x10\x01\x12\x12\n\x0eNON_RECIPROCAL\x10\x02"s\n\x05Tense\x12\x11\n\rTENSE_UNKNOWN\x10\x00\x12\x15\n\x11\x43ONDITIONAL_TENSE\x10\x01\x12\n\n\x06\x46UTURE\x10\x02\x12\x08\n\x04PAST\x10\x03\x12\x0b\n\x07PRESENT\x10\x04\x12\r\n\tIMPERFECT\x10\x05\x12\x0e\n\nPLUPERFECT\x10\x06"B\n\x05Voice\x12\x11\n\rVOICE_UNKNOWN\x10\x00\x12\n\n\x06\x41\x43TIVE\x10\x01\x12\r\n\tCAUSATIVE\x10\x02\x12\x0b\n\x07PASSIVE\x10\x03"\x9a\x08\n\x0e\x44\x65pendencyEdge\x12\x18\n\x10head_token_index\x18\x01 \x01(\x05\x12\x42\n\x05label\x18\x02 
\x01(\x0e\x32\x33.google.cloud.language.v1beta2.DependencyEdge.Label"\xa9\x07\n\x05Label\x12\x0b\n\x07UNKNOWN\x10\x00\x12\n\n\x06\x41\x42\x42REV\x10\x01\x12\t\n\x05\x41\x43OMP\x10\x02\x12\t\n\x05\x41\x44VCL\x10\x03\x12\n\n\x06\x41\x44VMOD\x10\x04\x12\x08\n\x04\x41MOD\x10\x05\x12\t\n\x05\x41PPOS\x10\x06\x12\x08\n\x04\x41TTR\x10\x07\x12\x07\n\x03\x41UX\x10\x08\x12\x0b\n\x07\x41UXPASS\x10\t\x12\x06\n\x02\x43\x43\x10\n\x12\t\n\x05\x43\x43OMP\x10\x0b\x12\x08\n\x04\x43ONJ\x10\x0c\x12\t\n\x05\x43SUBJ\x10\r\x12\r\n\tCSUBJPASS\x10\x0e\x12\x07\n\x03\x44\x45P\x10\x0f\x12\x07\n\x03\x44\x45T\x10\x10\x12\r\n\tDISCOURSE\x10\x11\x12\x08\n\x04\x44OBJ\x10\x12\x12\x08\n\x04\x45XPL\x10\x13\x12\x0c\n\x08GOESWITH\x10\x14\x12\x08\n\x04IOBJ\x10\x15\x12\x08\n\x04MARK\x10\x16\x12\x07\n\x03MWE\x10\x17\x12\x07\n\x03MWV\x10\x18\x12\x07\n\x03NEG\x10\x19\x12\x06\n\x02NN\x10\x1a\x12\x0c\n\x08NPADVMOD\x10\x1b\x12\t\n\x05NSUBJ\x10\x1c\x12\r\n\tNSUBJPASS\x10\x1d\x12\x07\n\x03NUM\x10\x1e\x12\n\n\x06NUMBER\x10\x1f\x12\x05\n\x01P\x10 
\x12\r\n\tPARATAXIS\x10!\x12\x0b\n\x07PARTMOD\x10"\x12\t\n\x05PCOMP\x10#\x12\x08\n\x04POBJ\x10$\x12\x08\n\x04POSS\x10%\x12\x0b\n\x07POSTNEG\x10&\x12\x0b\n\x07PRECOMP\x10\'\x12\x0b\n\x07PRECONJ\x10(\x12\n\n\x06PREDET\x10)\x12\x08\n\x04PREF\x10*\x12\x08\n\x04PREP\x10+\x12\t\n\x05PRONL\x10,\x12\x07\n\x03PRT\x10-\x12\x06\n\x02PS\x10.\x12\x0c\n\x08QUANTMOD\x10/\x12\t\n\x05RCMOD\x10\x30\x12\x0c\n\x08RCMODREL\x10\x31\x12\t\n\x05RDROP\x10\x32\x12\x07\n\x03REF\x10\x33\x12\x0b\n\x07REMNANT\x10\x34\x12\x0e\n\nREPARANDUM\x10\x35\x12\x08\n\x04ROOT\x10\x36\x12\x08\n\x04SNUM\x10\x37\x12\x08\n\x04SUFF\x10\x38\x12\x08\n\x04TMOD\x10\x39\x12\t\n\x05TOPIC\x10:\x12\x08\n\x04VMOD\x10;\x12\x0c\n\x08VOCATIVE\x10<\x12\t\n\x05XCOMP\x10=\x12\n\n\x06SUFFIX\x10>\x12\t\n\x05TITLE\x10?\x12\x0c\n\x08\x41\x44VPHMOD\x10@\x12\x0b\n\x07\x41UXCAUS\x10\x41\x12\t\n\x05\x41UXVV\x10\x42\x12\t\n\x05\x44TMOD\x10\x43\x12\x0b\n\x07\x46OREIGN\x10\x44\x12\x06\n\x02KW\x10\x45\x12\x08\n\x04LIST\x10\x46\x12\x08\n\x04NOMC\x10G\x12\x0c\n\x08NOMCSUBJ\x10H\x12\x10\n\x0cNOMCSUBJPASS\x10I\x12\x08\n\x04NUMC\x10J\x12\x07\n\x03\x43OP\x10K\x12\x0e\n\nDISLOCATED\x10L\x12\x07\n\x03\x41SP\x10M\x12\x08\n\x04GMOD\x10N\x12\x08\n\x04GOBJ\x10O\x12\n\n\x06INFMOD\x10P\x12\x07\n\x03MES\x10Q\x12\t\n\x05NCOMP\x10R"\xf6\x01\n\rEntityMention\x12\x35\n\x04text\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.TextSpan\x12?\n\x04type\x18\x02 \x01(\x0e\x32\x31.google.cloud.language.v1beta2.EntityMention.Type\x12;\n\tsentiment\x18\x03 \x01(\x0b\x32(.google.cloud.language.v1beta2.Sentiment"0\n\x04Type\x12\x10\n\x0cTYPE_UNKNOWN\x10\x00\x12\n\n\x06PROPER\x10\x01\x12\n\n\x06\x43OMMON\x10\x02"1\n\x08TextSpan\x12\x0f\n\x07\x63ontent\x18\x01 \x01(\t\x12\x14\n\x0c\x62\x65gin_offset\x18\x02 \x01(\x05":\n\x16\x43lassificationCategory\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\nconfidence\x18\x02 \x01(\x02"\x98\x01\n\x17\x41nalyzeSentimentRequest\x12\x39\n\x08\x64ocument\x18\x01 
\x01(\x0b\x32\'.google.cloud.language.v1beta2.Document\x12\x42\n\rencoding_type\x18\x02 \x01(\x0e\x32+.google.cloud.language.v1beta2.EncodingType"\xae\x01\n\x18\x41nalyzeSentimentResponse\x12\x44\n\x12\x64ocument_sentiment\x18\x01 \x01(\x0b\x32(.google.cloud.language.v1beta2.Sentiment\x12\x10\n\x08language\x18\x02 \x01(\t\x12:\n\tsentences\x18\x03 \x03(\x0b\x32\'.google.cloud.language.v1beta2.Sentence"\x9e\x01\n\x1d\x41nalyzeEntitySentimentRequest\x12\x39\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.Document\x12\x42\n\rencoding_type\x18\x02 \x01(\x0e\x32+.google.cloud.language.v1beta2.EncodingType"k\n\x1e\x41nalyzeEntitySentimentResponse\x12\x37\n\x08\x65ntities\x18\x01 \x03(\x0b\x32%.google.cloud.language.v1beta2.Entity\x12\x10\n\x08language\x18\x02 \x01(\t"\x97\x01\n\x16\x41nalyzeEntitiesRequest\x12\x39\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.Document\x12\x42\n\rencoding_type\x18\x02 \x01(\x0e\x32+.google.cloud.language.v1beta2.EncodingType"d\n\x17\x41nalyzeEntitiesResponse\x12\x37\n\x08\x65ntities\x18\x01 \x03(\x0b\x32%.google.cloud.language.v1beta2.Entity\x12\x10\n\x08language\x18\x02 \x01(\t"\x95\x01\n\x14\x41nalyzeSyntaxRequest\x12\x39\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.Document\x12\x42\n\rencoding_type\x18\x02 \x01(\x0e\x32+.google.cloud.language.v1beta2.EncodingType"\x9b\x01\n\x15\x41nalyzeSyntaxResponse\x12:\n\tsentences\x18\x01 \x03(\x0b\x32\'.google.cloud.language.v1beta2.Sentence\x12\x34\n\x06tokens\x18\x02 \x03(\x0b\x32$.google.cloud.language.v1beta2.Token\x12\x10\n\x08language\x18\x03 \x01(\t"P\n\x13\x43lassifyTextRequest\x12\x39\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.Document"a\n\x14\x43lassifyTextResponse\x12I\n\ncategories\x18\x01 \x03(\x0b\x32\x35.google.cloud.language.v1beta2.ClassificationCategory"\xff\x02\n\x13\x41nnotateTextRequest\x12\x39\n\x08\x64ocument\x18\x01 
\x01(\x0b\x32\'.google.cloud.language.v1beta2.Document\x12M\n\x08\x66\x65\x61tures\x18\x02 \x01(\x0b\x32;.google.cloud.language.v1beta2.AnnotateTextRequest.Features\x12\x42\n\rencoding_type\x18\x03 \x01(\x0e\x32+.google.cloud.language.v1beta2.EncodingType\x1a\x99\x01\n\x08\x46\x65\x61tures\x12\x16\n\x0e\x65xtract_syntax\x18\x01 \x01(\x08\x12\x18\n\x10\x65xtract_entities\x18\x02 \x01(\x08\x12"\n\x1a\x65xtract_document_sentiment\x18\x03 \x01(\x08\x12 \n\x18\x65xtract_entity_sentiment\x18\x04 \x01(\x08\x12\x15\n\rclassify_text\x18\x06 \x01(\x08"\xe4\x02\n\x14\x41nnotateTextResponse\x12:\n\tsentences\x18\x01 \x03(\x0b\x32\'.google.cloud.language.v1beta2.Sentence\x12\x34\n\x06tokens\x18\x02 \x03(\x0b\x32$.google.cloud.language.v1beta2.Token\x12\x37\n\x08\x65ntities\x18\x03 \x03(\x0b\x32%.google.cloud.language.v1beta2.Entity\x12\x44\n\x12\x64ocument_sentiment\x18\x04 \x01(\x0b\x32(.google.cloud.language.v1beta2.Sentiment\x12\x10\n\x08language\x18\x05 \x01(\t\x12I\n\ncategories\x18\x06 \x03(\x0b\x32\x35.google.cloud.language.v1beta2.ClassificationCategory*8\n\x0c\x45ncodingType\x12\x08\n\x04NONE\x10\x00\x12\x08\n\x04UTF8\x10\x01\x12\t\n\x05UTF16\x10\x02\x12\t\n\x05UTF32\x10\x03\x32\xbd\x08\n\x0fLanguageService\x12\xb3\x01\n\x10\x41nalyzeSentiment\x12\x36.google.cloud.language.v1beta2.AnalyzeSentimentRequest\x1a\x37.google.cloud.language.v1beta2.AnalyzeSentimentResponse".\x82\xd3\xe4\x93\x02("#/v1beta2/documents:analyzeSentiment:\x01*\x12\xaf\x01\n\x0f\x41nalyzeEntities\x12\x35.google.cloud.language.v1beta2.AnalyzeEntitiesRequest\x1a\x36.google.cloud.language.v1beta2.AnalyzeEntitiesResponse"-\x82\xd3\xe4\x93\x02\'""/v1beta2/documents:analyzeEntities:\x01*\x12\xcb\x01\n\x16\x41nalyzeEntitySentiment\x12<.google.cloud.language.v1beta2.AnalyzeEntitySentimentRequest\x1a=.google.cloud.language.v1beta2.AnalyzeEntitySentimentResponse"4\x82\xd3\xe4\x93\x02.")/v1beta2/documents:analyzeEntitySentiment:\x01*\x12\xa7\x01\n\rAnalyzeSyntax\x12\x33.google.cloud.language.v1beta2.AnalyzeSynt
axRequest\x1a\x34.google.cloud.language.v1beta2.AnalyzeSyntaxResponse"+\x82\xd3\xe4\x93\x02%" /v1beta2/documents:analyzeSyntax:\x01*\x12\xa3\x01\n\x0c\x43lassifyText\x12\x32.google.cloud.language.v1beta2.ClassifyTextRequest\x1a\x33.google.cloud.language.v1beta2.ClassifyTextResponse"*\x82\xd3\xe4\x93\x02$"\x1f/v1beta2/documents:classifyText:\x01*\x12\xa3\x01\n\x0c\x41nnotateText\x12\x32.google.cloud.language.v1beta2.AnnotateTextRequest\x1a\x33.google.cloud.language.v1beta2.AnnotateTextResponse"*\x82\xd3\xe4\x93\x02$"\x1f/v1beta2/documents:annotateText:\x01*B\x82\x01\n!com.google.cloud.language.v1beta2B\x14LanguageServiceProtoP\x01ZEgoogle.golang.org/genproto/googleapis/cloud/language/v1beta2;languageb\x06proto3' + '\n:google/cloud/language_v1beta2/proto/language_service.proto\x12\x1dgoogle.cloud.language.v1beta2\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xc8\x01\n\x08\x44ocument\x12:\n\x04type\x18\x01 \x01(\x0e\x32,.google.cloud.language.v1beta2.Document.Type\x12\x11\n\x07\x63ontent\x18\x02 \x01(\tH\x00\x12\x19\n\x0fgcs_content_uri\x18\x03 \x01(\tH\x00\x12\x10\n\x08language\x18\x04 \x01(\t"6\n\x04Type\x12\x14\n\x10TYPE_UNSPECIFIED\x10\x00\x12\x0e\n\nPLAIN_TEXT\x10\x01\x12\x08\n\x04HTML\x10\x02\x42\x08\n\x06source"~\n\x08Sentence\x12\x35\n\x04text\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.TextSpan\x12;\n\tsentiment\x18\x02 \x01(\x0b\x32(.google.cloud.language.v1beta2.Sentiment"\x93\x04\n\x06\x45ntity\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x38\n\x04type\x18\x02 \x01(\x0e\x32*.google.cloud.language.v1beta2.Entity.Type\x12\x45\n\x08metadata\x18\x03 \x03(\x0b\x32\x33.google.cloud.language.v1beta2.Entity.MetadataEntry\x12\x10\n\x08salience\x18\x04 \x01(\x02\x12>\n\x08mentions\x18\x05 \x03(\x0b\x32,.google.cloud.language.v1beta2.EntityMention\x12;\n\tsentiment\x18\x06 
\x01(\x0b\x32(.google.cloud.language.v1beta2.Sentiment\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xb9\x01\n\x04Type\x12\x0b\n\x07UNKNOWN\x10\x00\x12\n\n\x06PERSON\x10\x01\x12\x0c\n\x08LOCATION\x10\x02\x12\x10\n\x0cORGANIZATION\x10\x03\x12\t\n\x05\x45VENT\x10\x04\x12\x0f\n\x0bWORK_OF_ART\x10\x05\x12\x11\n\rCONSUMER_GOOD\x10\x06\x12\t\n\x05OTHER\x10\x07\x12\x10\n\x0cPHONE_NUMBER\x10\t\x12\x0b\n\x07\x41\x44\x44RESS\x10\n\x12\x08\n\x04\x44\x41TE\x10\x0b\x12\n\n\x06NUMBER\x10\x0c\x12\t\n\x05PRICE\x10\r"\xda\x01\n\x05Token\x12\x35\n\x04text\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.TextSpan\x12\x43\n\x0epart_of_speech\x18\x02 \x01(\x0b\x32+.google.cloud.language.v1beta2.PartOfSpeech\x12\x46\n\x0f\x64\x65pendency_edge\x18\x03 \x01(\x0b\x32-.google.cloud.language.v1beta2.DependencyEdge\x12\r\n\x05lemma\x18\x04 \x01(\t"-\n\tSentiment\x12\x11\n\tmagnitude\x18\x02 \x01(\x02\x12\r\n\x05score\x18\x03 \x01(\x02"\xdf\x10\n\x0cPartOfSpeech\x12<\n\x03tag\x18\x01 \x01(\x0e\x32/.google.cloud.language.v1beta2.PartOfSpeech.Tag\x12\x42\n\x06\x61spect\x18\x02 \x01(\x0e\x32\x32.google.cloud.language.v1beta2.PartOfSpeech.Aspect\x12>\n\x04\x63\x61se\x18\x03 \x01(\x0e\x32\x30.google.cloud.language.v1beta2.PartOfSpeech.Case\x12>\n\x04\x66orm\x18\x04 \x01(\x0e\x32\x30.google.cloud.language.v1beta2.PartOfSpeech.Form\x12\x42\n\x06gender\x18\x05 \x01(\x0e\x32\x32.google.cloud.language.v1beta2.PartOfSpeech.Gender\x12>\n\x04mood\x18\x06 \x01(\x0e\x32\x30.google.cloud.language.v1beta2.PartOfSpeech.Mood\x12\x42\n\x06number\x18\x07 \x01(\x0e\x32\x32.google.cloud.language.v1beta2.PartOfSpeech.Number\x12\x42\n\x06person\x18\x08 \x01(\x0e\x32\x32.google.cloud.language.v1beta2.PartOfSpeech.Person\x12\x42\n\x06proper\x18\t \x01(\x0e\x32\x32.google.cloud.language.v1beta2.PartOfSpeech.Proper\x12L\n\x0breciprocity\x18\n \x01(\x0e\x32\x37.google.cloud.language.v1beta2.PartOfSpeech.Reciprocity\x12@\n\x05tense\x18\x0b 
\x01(\x0e\x32\x31.google.cloud.language.v1beta2.PartOfSpeech.Tense\x12@\n\x05voice\x18\x0c \x01(\x0e\x32\x31.google.cloud.language.v1beta2.PartOfSpeech.Voice"\x8d\x01\n\x03Tag\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x07\n\x03\x41\x44J\x10\x01\x12\x07\n\x03\x41\x44P\x10\x02\x12\x07\n\x03\x41\x44V\x10\x03\x12\x08\n\x04\x43ONJ\x10\x04\x12\x07\n\x03\x44\x45T\x10\x05\x12\x08\n\x04NOUN\x10\x06\x12\x07\n\x03NUM\x10\x07\x12\x08\n\x04PRON\x10\x08\x12\x07\n\x03PRT\x10\t\x12\t\n\x05PUNCT\x10\n\x12\x08\n\x04VERB\x10\x0b\x12\x05\n\x01X\x10\x0c\x12\t\n\x05\x41\x46\x46IX\x10\r"O\n\x06\x41spect\x12\x12\n\x0e\x41SPECT_UNKNOWN\x10\x00\x12\x0e\n\nPERFECTIVE\x10\x01\x12\x10\n\x0cIMPERFECTIVE\x10\x02\x12\x0f\n\x0bPROGRESSIVE\x10\x03"\xf8\x01\n\x04\x43\x61se\x12\x10\n\x0c\x43\x41SE_UNKNOWN\x10\x00\x12\x0e\n\nACCUSATIVE\x10\x01\x12\r\n\tADVERBIAL\x10\x02\x12\x11\n\rCOMPLEMENTIVE\x10\x03\x12\n\n\x06\x44\x41TIVE\x10\x04\x12\x0c\n\x08GENITIVE\x10\x05\x12\x10\n\x0cINSTRUMENTAL\x10\x06\x12\x0c\n\x08LOCATIVE\x10\x07\x12\x0e\n\nNOMINATIVE\x10\x08\x12\x0b\n\x07OBLIQUE\x10\t\x12\r\n\tPARTITIVE\x10\n\x12\x11\n\rPREPOSITIONAL\x10\x0b\x12\x12\n\x0eREFLEXIVE_CASE\x10\x0c\x12\x11\n\rRELATIVE_CASE\x10\r\x12\x0c\n\x08VOCATIVE\x10\x0e"\xaf\x01\n\x04\x46orm\x12\x10\n\x0c\x46ORM_UNKNOWN\x10\x00\x12\x0c\n\x08\x41\x44NOMIAL\x10\x01\x12\r\n\tAUXILIARY\x10\x02\x12\x12\n\x0e\x43OMPLEMENTIZER\x10\x03\x12\x10\n\x0c\x46INAL_ENDING\x10\x04\x12\n\n\x06GERUND\x10\x05\x12\n\n\x06REALIS\x10\x06\x12\x0c\n\x08IRREALIS\x10\x07\x12\t\n\x05SHORT\x10\x08\x12\x08\n\x04LONG\x10\t\x12\t\n\x05ORDER\x10\n\x12\x0c\n\x08SPECIFIC\x10\x0b"E\n\x06Gender\x12\x12\n\x0eGENDER_UNKNOWN\x10\x00\x12\x0c\n\x08\x46\x45MININE\x10\x01\x12\r\n\tMASCULINE\x10\x02\x12\n\n\x06NEUTER\x10\x03"\x7f\n\x04Mood\x12\x10\n\x0cMOOD_UNKNOWN\x10\x00\x12\x14\n\x10\x43ONDITIONAL_MOOD\x10\x01\x12\x0e\n\nIMPERATIVE\x10\x02\x12\x0e\n\nINDICATIVE\x10\x03\x12\x11\n\rINTERROGATIVE\x10\x04\x12\x0b\n\x07JUSSIVE\x10\x05\x12\x0f\n\x0bSUBJUNCTIVE\x10\x06"@\n\x06Number\x12\x12\n\x0
eNUMBER_UNKNOWN\x10\x00\x12\x0c\n\x08SINGULAR\x10\x01\x12\n\n\x06PLURAL\x10\x02\x12\x08\n\x04\x44UAL\x10\x03"T\n\x06Person\x12\x12\n\x0ePERSON_UNKNOWN\x10\x00\x12\t\n\x05\x46IRST\x10\x01\x12\n\n\x06SECOND\x10\x02\x12\t\n\x05THIRD\x10\x03\x12\x14\n\x10REFLEXIVE_PERSON\x10\x04"8\n\x06Proper\x12\x12\n\x0ePROPER_UNKNOWN\x10\x00\x12\n\n\x06PROPER\x10\x01\x12\x0e\n\nNOT_PROPER\x10\x02"J\n\x0bReciprocity\x12\x17\n\x13RECIPROCITY_UNKNOWN\x10\x00\x12\x0e\n\nRECIPROCAL\x10\x01\x12\x12\n\x0eNON_RECIPROCAL\x10\x02"s\n\x05Tense\x12\x11\n\rTENSE_UNKNOWN\x10\x00\x12\x15\n\x11\x43ONDITIONAL_TENSE\x10\x01\x12\n\n\x06\x46UTURE\x10\x02\x12\x08\n\x04PAST\x10\x03\x12\x0b\n\x07PRESENT\x10\x04\x12\r\n\tIMPERFECT\x10\x05\x12\x0e\n\nPLUPERFECT\x10\x06"B\n\x05Voice\x12\x11\n\rVOICE_UNKNOWN\x10\x00\x12\n\n\x06\x41\x43TIVE\x10\x01\x12\r\n\tCAUSATIVE\x10\x02\x12\x0b\n\x07PASSIVE\x10\x03"\x9a\x08\n\x0e\x44\x65pendencyEdge\x12\x18\n\x10head_token_index\x18\x01 \x01(\x05\x12\x42\n\x05label\x18\x02 \x01(\x0e\x32\x33.google.cloud.language.v1beta2.DependencyEdge.Label"\xa9\x07\n\x05Label\x12\x0b\n\x07UNKNOWN\x10\x00\x12\n\n\x06\x41\x42\x42REV\x10\x01\x12\t\n\x05\x41\x43OMP\x10\x02\x12\t\n\x05\x41\x44VCL\x10\x03\x12\n\n\x06\x41\x44VMOD\x10\x04\x12\x08\n\x04\x41MOD\x10\x05\x12\t\n\x05\x41PPOS\x10\x06\x12\x08\n\x04\x41TTR\x10\x07\x12\x07\n\x03\x41UX\x10\x08\x12\x0b\n\x07\x41UXPASS\x10\t\x12\x06\n\x02\x43\x43\x10\n\x12\t\n\x05\x43\x43OMP\x10\x0b\x12\x08\n\x04\x43ONJ\x10\x0c\x12\t\n\x05\x43SUBJ\x10\r\x12\r\n\tCSUBJPASS\x10\x0e\x12\x07\n\x03\x44\x45P\x10\x0f\x12\x07\n\x03\x44\x45T\x10\x10\x12\r\n\tDISCOURSE\x10\x11\x12\x08\n\x04\x44OBJ\x10\x12\x12\x08\n\x04\x45XPL\x10\x13\x12\x0c\n\x08GOESWITH\x10\x14\x12\x08\n\x04IOBJ\x10\x15\x12\x08\n\x04MARK\x10\x16\x12\x07\n\x03MWE\x10\x17\x12\x07\n\x03MWV\x10\x18\x12\x07\n\x03NEG\x10\x19\x12\x06\n\x02NN\x10\x1a\x12\x0c\n\x08NPADVMOD\x10\x1b\x12\t\n\x05NSUBJ\x10\x1c\x12\r\n\tNSUBJPASS\x10\x1d\x12\x07\n\x03NUM\x10\x1e\x12\n\n\x06NUMBER\x10\x1f\x12\x05\n\x01P\x10 
\x12\r\n\tPARATAXIS\x10!\x12\x0b\n\x07PARTMOD\x10"\x12\t\n\x05PCOMP\x10#\x12\x08\n\x04POBJ\x10$\x12\x08\n\x04POSS\x10%\x12\x0b\n\x07POSTNEG\x10&\x12\x0b\n\x07PRECOMP\x10\'\x12\x0b\n\x07PRECONJ\x10(\x12\n\n\x06PREDET\x10)\x12\x08\n\x04PREF\x10*\x12\x08\n\x04PREP\x10+\x12\t\n\x05PRONL\x10,\x12\x07\n\x03PRT\x10-\x12\x06\n\x02PS\x10.\x12\x0c\n\x08QUANTMOD\x10/\x12\t\n\x05RCMOD\x10\x30\x12\x0c\n\x08RCMODREL\x10\x31\x12\t\n\x05RDROP\x10\x32\x12\x07\n\x03REF\x10\x33\x12\x0b\n\x07REMNANT\x10\x34\x12\x0e\n\nREPARANDUM\x10\x35\x12\x08\n\x04ROOT\x10\x36\x12\x08\n\x04SNUM\x10\x37\x12\x08\n\x04SUFF\x10\x38\x12\x08\n\x04TMOD\x10\x39\x12\t\n\x05TOPIC\x10:\x12\x08\n\x04VMOD\x10;\x12\x0c\n\x08VOCATIVE\x10<\x12\t\n\x05XCOMP\x10=\x12\n\n\x06SUFFIX\x10>\x12\t\n\x05TITLE\x10?\x12\x0c\n\x08\x41\x44VPHMOD\x10@\x12\x0b\n\x07\x41UXCAUS\x10\x41\x12\t\n\x05\x41UXVV\x10\x42\x12\t\n\x05\x44TMOD\x10\x43\x12\x0b\n\x07\x46OREIGN\x10\x44\x12\x06\n\x02KW\x10\x45\x12\x08\n\x04LIST\x10\x46\x12\x08\n\x04NOMC\x10G\x12\x0c\n\x08NOMCSUBJ\x10H\x12\x10\n\x0cNOMCSUBJPASS\x10I\x12\x08\n\x04NUMC\x10J\x12\x07\n\x03\x43OP\x10K\x12\x0e\n\nDISLOCATED\x10L\x12\x07\n\x03\x41SP\x10M\x12\x08\n\x04GMOD\x10N\x12\x08\n\x04GOBJ\x10O\x12\n\n\x06INFMOD\x10P\x12\x07\n\x03MES\x10Q\x12\t\n\x05NCOMP\x10R"\xf6\x01\n\rEntityMention\x12\x35\n\x04text\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.TextSpan\x12?\n\x04type\x18\x02 \x01(\x0e\x32\x31.google.cloud.language.v1beta2.EntityMention.Type\x12;\n\tsentiment\x18\x03 \x01(\x0b\x32(.google.cloud.language.v1beta2.Sentiment"0\n\x04Type\x12\x10\n\x0cTYPE_UNKNOWN\x10\x00\x12\n\n\x06PROPER\x10\x01\x12\n\n\x06\x43OMMON\x10\x02"1\n\x08TextSpan\x12\x0f\n\x07\x63ontent\x18\x01 \x01(\t\x12\x14\n\x0c\x62\x65gin_offset\x18\x02 \x01(\x05":\n\x16\x43lassificationCategory\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\nconfidence\x18\x02 \x01(\x02"\x9d\x01\n\x17\x41nalyzeSentimentRequest\x12>\n\x08\x64ocument\x18\x01 
\x01(\x0b\x32\'.google.cloud.language.v1beta2.DocumentB\x03\xe0\x41\x02\x12\x42\n\rencoding_type\x18\x02 \x01(\x0e\x32+.google.cloud.language.v1beta2.EncodingType"\xae\x01\n\x18\x41nalyzeSentimentResponse\x12\x44\n\x12\x64ocument_sentiment\x18\x01 \x01(\x0b\x32(.google.cloud.language.v1beta2.Sentiment\x12\x10\n\x08language\x18\x02 \x01(\t\x12:\n\tsentences\x18\x03 \x03(\x0b\x32\'.google.cloud.language.v1beta2.Sentence"\xa3\x01\n\x1d\x41nalyzeEntitySentimentRequest\x12>\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.DocumentB\x03\xe0\x41\x02\x12\x42\n\rencoding_type\x18\x02 \x01(\x0e\x32+.google.cloud.language.v1beta2.EncodingType"k\n\x1e\x41nalyzeEntitySentimentResponse\x12\x37\n\x08\x65ntities\x18\x01 \x03(\x0b\x32%.google.cloud.language.v1beta2.Entity\x12\x10\n\x08language\x18\x02 \x01(\t"\x9c\x01\n\x16\x41nalyzeEntitiesRequest\x12>\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.DocumentB\x03\xe0\x41\x02\x12\x42\n\rencoding_type\x18\x02 \x01(\x0e\x32+.google.cloud.language.v1beta2.EncodingType"d\n\x17\x41nalyzeEntitiesResponse\x12\x37\n\x08\x65ntities\x18\x01 \x03(\x0b\x32%.google.cloud.language.v1beta2.Entity\x12\x10\n\x08language\x18\x02 \x01(\t"\x9a\x01\n\x14\x41nalyzeSyntaxRequest\x12>\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.DocumentB\x03\xe0\x41\x02\x12\x42\n\rencoding_type\x18\x02 \x01(\x0e\x32+.google.cloud.language.v1beta2.EncodingType"\x9b\x01\n\x15\x41nalyzeSyntaxResponse\x12:\n\tsentences\x18\x01 \x03(\x0b\x32\'.google.cloud.language.v1beta2.Sentence\x12\x34\n\x06tokens\x18\x02 \x03(\x0b\x32$.google.cloud.language.v1beta2.Token\x12\x10\n\x08language\x18\x03 \x01(\t"U\n\x13\x43lassifyTextRequest\x12>\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.DocumentB\x03\xe0\x41\x02"a\n\x14\x43lassifyTextResponse\x12I\n\ncategories\x18\x01 
\x03(\x0b\x32\x35.google.cloud.language.v1beta2.ClassificationCategory"\x89\x03\n\x13\x41nnotateTextRequest\x12>\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.DocumentB\x03\xe0\x41\x02\x12R\n\x08\x66\x65\x61tures\x18\x02 \x01(\x0b\x32;.google.cloud.language.v1beta2.AnnotateTextRequest.FeaturesB\x03\xe0\x41\x02\x12\x42\n\rencoding_type\x18\x03 \x01(\x0e\x32+.google.cloud.language.v1beta2.EncodingType\x1a\x99\x01\n\x08\x46\x65\x61tures\x12\x16\n\x0e\x65xtract_syntax\x18\x01 \x01(\x08\x12\x18\n\x10\x65xtract_entities\x18\x02 \x01(\x08\x12"\n\x1a\x65xtract_document_sentiment\x18\x03 \x01(\x08\x12 \n\x18\x65xtract_entity_sentiment\x18\x04 \x01(\x08\x12\x15\n\rclassify_text\x18\x06 \x01(\x08"\xe4\x02\n\x14\x41nnotateTextResponse\x12:\n\tsentences\x18\x01 \x03(\x0b\x32\'.google.cloud.language.v1beta2.Sentence\x12\x34\n\x06tokens\x18\x02 \x03(\x0b\x32$.google.cloud.language.v1beta2.Token\x12\x37\n\x08\x65ntities\x18\x03 \x03(\x0b\x32%.google.cloud.language.v1beta2.Entity\x12\x44\n\x12\x64ocument_sentiment\x18\x04 \x01(\x0b\x32(.google.cloud.language.v1beta2.Sentiment\x12\x10\n\x08language\x18\x05 \x01(\t\x12I\n\ncategories\x18\x06 
\x03(\x0b\x32\x35.google.cloud.language.v1beta2.ClassificationCategory*8\n\x0c\x45ncodingType\x12\x08\n\x04NONE\x10\x00\x12\x08\n\x04UTF8\x10\x01\x12\t\n\x05UTF16\x10\x02\x12\t\n\x05UTF32\x10\x03\x32\x8a\x0b\n\x0fLanguageService\x12\xd7\x01\n\x10\x41nalyzeSentiment\x12\x36.google.cloud.language.v1beta2.AnalyzeSentimentRequest\x1a\x37.google.cloud.language.v1beta2.AnalyzeSentimentResponse"R\x82\xd3\xe4\x93\x02("#/v1beta2/documents:analyzeSentiment:\x01*\xda\x41\x16\x64ocument,encoding_type\xda\x41\x08\x64ocument\x12\xd3\x01\n\x0f\x41nalyzeEntities\x12\x35.google.cloud.language.v1beta2.AnalyzeEntitiesRequest\x1a\x36.google.cloud.language.v1beta2.AnalyzeEntitiesResponse"Q\x82\xd3\xe4\x93\x02\'""/v1beta2/documents:analyzeEntities:\x01*\xda\x41\x16\x64ocument,encoding_type\xda\x41\x08\x64ocument\x12\xef\x01\n\x16\x41nalyzeEntitySentiment\x12<.google.cloud.language.v1beta2.AnalyzeEntitySentimentRequest\x1a=.google.cloud.language.v1beta2.AnalyzeEntitySentimentResponse"X\x82\xd3\xe4\x93\x02.")/v1beta2/documents:analyzeEntitySentiment:\x01*\xda\x41\x16\x64ocument,encoding_type\xda\x41\x08\x64ocument\x12\xcb\x01\n\rAnalyzeSyntax\x12\x33.google.cloud.language.v1beta2.AnalyzeSyntaxRequest\x1a\x34.google.cloud.language.v1beta2.AnalyzeSyntaxResponse"O\x82\xd3\xe4\x93\x02%" 
/v1beta2/documents:analyzeSyntax:\x01*\xda\x41\x16\x64ocument,encoding_type\xda\x41\x08\x64ocument\x12\xae\x01\n\x0c\x43lassifyText\x12\x32.google.cloud.language.v1beta2.ClassifyTextRequest\x1a\x33.google.cloud.language.v1beta2.ClassifyTextResponse"5\x82\xd3\xe4\x93\x02$"\x1f/v1beta2/documents:classifyText:\x01*\xda\x41\x08\x64ocument\x12\xd9\x01\n\x0c\x41nnotateText\x12\x32.google.cloud.language.v1beta2.AnnotateTextRequest\x1a\x33.google.cloud.language.v1beta2.AnnotateTextResponse"`\x82\xd3\xe4\x93\x02$"\x1f/v1beta2/documents:annotateText:\x01*\xda\x41\x1f\x64ocument,features,encoding_type\xda\x41\x11\x64ocument,features\x1az\xca\x41\x17language.googleapis.com\xd2\x41]https://www.googleapis.com/auth/cloud-language,https://www.googleapis.com/auth/cloud-platformB\x82\x01\n!com.google.cloud.language.v1beta2B\x14LanguageServiceProtoP\x01ZEgoogle.golang.org/genproto/googleapis/cloud/language/v1beta2;languageb\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_longrunning_dot_operations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - google_dot_rpc_dot_status__pb2.DESCRIPTOR, ], ) @@ -63,8 +61,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=6939, - serialized_end=6995, + serialized_start=7035, + serialized_end=7091, ) _sym_db.RegisterEnumDescriptor(_ENCODINGTYPE) @@ -97,8 +95,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=355, - serialized_end=409, + serialized_start=351, + serialized_end=405, ) _sym_db.RegisterEnumDescriptor(_DOCUMENT_TYPE) @@ -132,11 +130,26 @@ _descriptor.EnumValueDescriptor( name="OTHER", index=7, number=7, serialized_options=None, type=None ), + _descriptor.EnumValueDescriptor( + name="PHONE_NUMBER", index=8, number=9, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="ADDRESS", index=9, number=10, 
serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="DATE", index=10, number=11, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="NUMBER", index=11, number=12, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="PRICE", index=12, number=13, serialized_options=None, type=None + ), ], containing_type=None, serialized_options=None, - serialized_start=895, - serialized_end=1016, + serialized_start=892, + serialized_end=1077, ) _sym_db.RegisterEnumDescriptor(_ENTITY_TYPE) @@ -191,8 +204,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=2108, - serialized_end=2249, + serialized_start=2169, + serialized_end=2310, ) _sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_TAG) @@ -217,8 +230,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=2251, - serialized_end=2330, + serialized_start=2312, + serialized_end=2391, ) _sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_ASPECT) @@ -288,8 +301,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=2333, - serialized_end=2581, + serialized_start=2394, + serialized_end=2642, ) _sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_CASE) @@ -338,8 +351,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=2584, - serialized_end=2759, + serialized_start=2645, + serialized_end=2820, ) _sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_FORM) @@ -364,8 +377,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=2761, - serialized_end=2830, + serialized_start=2822, + serialized_end=2891, ) _sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_GENDER) @@ -403,8 +416,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=2832, - serialized_end=2959, + serialized_start=2893, + serialized_end=3020, ) _sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_MOOD) @@ -429,8 +442,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=2961, - 
serialized_end=3025, + serialized_start=3022, + serialized_end=3086, ) _sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_NUMBER) @@ -462,8 +475,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=3027, - serialized_end=3111, + serialized_start=3088, + serialized_end=3172, ) _sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_PERSON) @@ -485,8 +498,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=3113, - serialized_end=3169, + serialized_start=3174, + serialized_end=3230, ) _sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_PROPER) @@ -512,8 +525,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=3171, - serialized_end=3245, + serialized_start=3232, + serialized_end=3306, ) _sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_RECIPROCITY) @@ -551,8 +564,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=3247, - serialized_end=3362, + serialized_start=3308, + serialized_end=3423, ) _sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_TENSE) @@ -577,8 +590,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=3364, - serialized_end=3430, + serialized_start=3425, + serialized_end=3491, ) _sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_VOICE) @@ -840,8 +853,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=3546, - serialized_end=4483, + serialized_start=3607, + serialized_end=4544, ) _sym_db.RegisterEnumDescriptor(_DEPENDENCYEDGE_LABEL) @@ -863,8 +876,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=4684, - serialized_end=4732, + serialized_start=4745, + serialized_end=4793, ) _sym_db.RegisterEnumDescriptor(_ENTITYMENTION_TYPE) @@ -965,8 +978,8 @@ fields=[], ) ], - serialized_start=219, - serialized_end=419, + serialized_start=215, + serialized_end=415, ) @@ -1022,8 +1035,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=421, - serialized_end=547, + serialized_start=417, + serialized_end=543, ) @@ -1079,8 +1092,8 @@ 
syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=846, - serialized_end=893, + serialized_start=842, + serialized_end=889, ) _ENTITY = _descriptor.Descriptor( @@ -1207,8 +1220,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=550, - serialized_end=1016, + serialized_start=546, + serialized_end=1077, ) @@ -1300,8 +1313,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1019, - serialized_end=1237, + serialized_start=1080, + serialized_end=1298, ) @@ -1357,8 +1370,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1239, - serialized_end=1284, + serialized_start=1300, + serialized_end=1345, ) @@ -1607,8 +1620,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1287, - serialized_end=3430, + serialized_start=1348, + serialized_end=3491, ) @@ -1664,8 +1677,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3433, - serialized_end=4483, + serialized_start=3494, + serialized_end=4544, ) @@ -1739,8 +1752,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4486, - serialized_end=4732, + serialized_start=4547, + serialized_end=4793, ) @@ -1796,8 +1809,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4734, - serialized_end=4783, + serialized_start=4795, + serialized_end=4844, ) @@ -1853,8 +1866,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4785, - serialized_end=4843, + serialized_start=4846, + serialized_end=4904, ) @@ -1880,7 +1893,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1910,8 +1923,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4846, - serialized_end=4998, + serialized_start=4907, + serialized_end=5064, ) @@ -1985,8 +1998,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5001, - 
serialized_end=5175, + serialized_start=5067, + serialized_end=5241, ) @@ -2012,7 +2025,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2042,8 +2055,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5178, - serialized_end=5336, + serialized_start=5244, + serialized_end=5407, ) @@ -2099,8 +2112,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5338, - serialized_end=5445, + serialized_start=5409, + serialized_end=5516, ) @@ -2126,7 +2139,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2156,8 +2169,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5448, - serialized_end=5599, + serialized_start=5519, + serialized_end=5675, ) @@ -2213,8 +2226,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5601, - serialized_end=5701, + serialized_start=5677, + serialized_end=5777, ) @@ -2240,7 +2253,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2270,8 +2283,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5704, - serialized_end=5853, + serialized_start=5780, + serialized_end=5934, ) @@ -2345,8 +2358,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5856, - serialized_end=6011, + serialized_start=5937, + serialized_end=6092, ) @@ -2372,7 +2385,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ) ], @@ -2384,8 +2397,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=6013, - serialized_end=6093, + 
serialized_start=6094, + serialized_end=6179, ) @@ -2423,8 +2436,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=6095, - serialized_end=6192, + serialized_start=6181, + serialized_end=6278, ) @@ -2534,8 +2547,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=6425, - serialized_end=6578, + serialized_start=6521, + serialized_end=6674, ) _ANNOTATETEXTREQUEST = _descriptor.Descriptor( @@ -2560,7 +2573,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2578,7 +2591,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2608,8 +2621,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=6195, - serialized_end=6578, + serialized_start=6281, + serialized_end=6674, ) @@ -2737,8 +2750,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=6581, - serialized_end=6937, + serialized_start=6677, + serialized_end=7033, ) _DOCUMENT.fields_by_name["type"].enum_type = _DOCUMENT_TYPE @@ -2871,7 +2884,8 @@ The source of the document: a string containing the content or a Google Cloud Storage URI. content: - The content of the input in string format. + The content of the input in string format. Cloud audit logging + exempt since it is based on user data. gcs_content_uri: The Google Cloud Storage URI where the file content is located. This URI must be of the form: @@ -2943,9 +2957,11 @@ type: The entity type. metadata: - Metadata associated with the entity. Currently, Wikipedia - URLs and Knowledge Graph MIDs are provided, if available. The - associated keys are "wikipedia\_url" and "mid", respectively. + Metadata associated with the entity. 
For most entity types, + the metadata is a Wikipedia URL (``wikipedia_url``) and + Knowledge Graph MID (``mid``), if they are available. For the + metadata associated with other entity types, see the Type + table below. salience: The salience score associated with the entity in the [0, 1.0] range. The salience score for an entity provides information @@ -3001,7 +3017,7 @@ DESCRIPTOR=_SENTIMENT, __module__="google.cloud.language_v1beta2.proto.language_service_pb2", __doc__="""Represents the feeling associated with the entire text or entities in - the text. + the text. Next ID: 6 Attributes: @@ -3144,7 +3160,8 @@ Attributes: name: - The name of the category representing the document. + The name of the category representing the document, from the + `predefined taxonomy `__. confidence: The classifier's confidence of the category. Number represents how certain the classifier is that this category represents @@ -3166,7 +3183,7 @@ Attributes: document: - Input document. + Required. Input document. encoding_type: The encoding type used by the API to calculate sentence offsets for the sentence sentiment. @@ -3213,7 +3230,7 @@ Attributes: document: - Input document. + Required. Input document. encoding_type: The encoding type used by the API to calculate offsets. """, @@ -3258,7 +3275,7 @@ Attributes: document: - Input document. + Required. Input document. encoding_type: The encoding type used by the API to calculate offsets. """, @@ -3302,7 +3319,7 @@ Attributes: document: - Input document. + Required. Input document. encoding_type: The encoding type used by the API to calculate offsets. """, @@ -3349,7 +3366,7 @@ Attributes: document: - Input document. + Required. Input document. """, # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.ClassifyTextRequest) ), @@ -3386,7 +3403,7 @@ __module__="google.cloud.language_v1beta2.proto.language_service_pb2", __doc__="""All available features for sentiment, syntax, and semantic analysis. 
Setting each one to true will enable that specific analysis for the - input. + input. Next ID: 10 Attributes: @@ -3399,7 +3416,9 @@ extract_entity_sentiment: Extract entities and their associated sentiment. classify_text: - Classify the full document into categories. + Classify the full document into categories. If this is true, + the API will use the default model which classifies into a + `predefined taxonomy `__. """, # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.AnnotateTextRequest.Features) ), @@ -3412,9 +3431,9 @@ Attributes: document: - Input document. + Required. Input document. features: - The enabled features. + Required. The enabled features. encoding_type: The encoding type used by the API to calculate offsets. """, @@ -3470,15 +3489,24 @@ DESCRIPTOR._options = None _ENTITY_METADATAENTRY._options = None +_ANALYZESENTIMENTREQUEST.fields_by_name["document"]._options = None +_ANALYZEENTITYSENTIMENTREQUEST.fields_by_name["document"]._options = None +_ANALYZEENTITIESREQUEST.fields_by_name["document"]._options = None +_ANALYZESYNTAXREQUEST.fields_by_name["document"]._options = None +_CLASSIFYTEXTREQUEST.fields_by_name["document"]._options = None +_ANNOTATETEXTREQUEST.fields_by_name["document"]._options = None +_ANNOTATETEXTREQUEST.fields_by_name["features"]._options = None _LANGUAGESERVICE = _descriptor.ServiceDescriptor( name="LanguageService", full_name="google.cloud.language.v1beta2.LanguageService", file=DESCRIPTOR, index=0, - serialized_options=None, - serialized_start=6998, - serialized_end=8083, + serialized_options=_b( + "\312A\027language.googleapis.com\322A]https://www.googleapis.com/auth/cloud-language,https://www.googleapis.com/auth/cloud-platform" + ), + serialized_start=7094, + serialized_end=8512, methods=[ _descriptor.MethodDescriptor( name="AnalyzeSentiment", @@ -3488,7 +3516,7 @@ input_type=_ANALYZESENTIMENTREQUEST, output_type=_ANALYZESENTIMENTRESPONSE, serialized_options=_b( - 
'\202\323\344\223\002("#/v1beta2/documents:analyzeSentiment:\001*' + '\202\323\344\223\002("#/v1beta2/documents:analyzeSentiment:\001*\332A\026document,encoding_type\332A\010document' ), ), _descriptor.MethodDescriptor( @@ -3499,7 +3527,7 @@ input_type=_ANALYZEENTITIESREQUEST, output_type=_ANALYZEENTITIESRESPONSE, serialized_options=_b( - '\202\323\344\223\002\'""/v1beta2/documents:analyzeEntities:\001*' + '\202\323\344\223\002\'""/v1beta2/documents:analyzeEntities:\001*\332A\026document,encoding_type\332A\010document' ), ), _descriptor.MethodDescriptor( @@ -3510,7 +3538,7 @@ input_type=_ANALYZEENTITYSENTIMENTREQUEST, output_type=_ANALYZEENTITYSENTIMENTRESPONSE, serialized_options=_b( - '\202\323\344\223\002.")/v1beta2/documents:analyzeEntitySentiment:\001*' + '\202\323\344\223\002.")/v1beta2/documents:analyzeEntitySentiment:\001*\332A\026document,encoding_type\332A\010document' ), ), _descriptor.MethodDescriptor( @@ -3521,7 +3549,7 @@ input_type=_ANALYZESYNTAXREQUEST, output_type=_ANALYZESYNTAXRESPONSE, serialized_options=_b( - '\202\323\344\223\002%" /v1beta2/documents:analyzeSyntax:\001*' + '\202\323\344\223\002%" /v1beta2/documents:analyzeSyntax:\001*\332A\026document,encoding_type\332A\010document' ), ), _descriptor.MethodDescriptor( @@ -3532,7 +3560,7 @@ input_type=_CLASSIFYTEXTREQUEST, output_type=_CLASSIFYTEXTRESPONSE, serialized_options=_b( - '\202\323\344\223\002$"\037/v1beta2/documents:classifyText:\001*' + '\202\323\344\223\002$"\037/v1beta2/documents:classifyText:\001*\332A\010document' ), ), _descriptor.MethodDescriptor( @@ -3543,7 +3571,7 @@ input_type=_ANNOTATETEXTREQUEST, output_type=_ANNOTATETEXTRESPONSE, serialized_options=_b( - '\202\323\344\223\002$"\037/v1beta2/documents:annotateText:\001*' + '\202\323\344\223\002$"\037/v1beta2/documents:annotateText:\001*\332A\037document,features,encoding_type\332A\021document,features' ), ), ], diff --git a/language/google/cloud/language_v1beta2/proto/language_service_pb2_grpc.py 
b/language/google/cloud/language_v1beta2/proto/language_service_pb2_grpc.py index da4223708164..e0e1e4124606 100644 --- a/language/google/cloud/language_v1beta2/proto/language_service_pb2_grpc.py +++ b/language/google/cloud/language_v1beta2/proto/language_service_pb2_grpc.py @@ -71,10 +71,8 @@ def AnalyzeEntities(self, request, context): raise NotImplementedError("Method not implemented!") def AnalyzeEntitySentiment(self, request, context): - """Finds entities, similar to - [AnalyzeEntities][google.cloud.language.v1beta2.LanguageService.AnalyzeEntities] - in the text and analyzes sentiment associated with each entity and its - mentions. + """Finds entities, similar to [AnalyzeEntities][google.cloud.language.v1beta2.LanguageService.AnalyzeEntities] in the text and analyzes + sentiment associated with each entity and its mentions. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") diff --git a/language/synth.metadata b/language/synth.metadata index d0b76fc8158c..8e564ada0f86 100644 --- a/language/synth.metadata +++ b/language/synth.metadata @@ -1,11 +1,19 @@ { - "updateTime": "2019-08-29T22:35:01.571061Z", + "updateTime": "2019-10-01T12:29:45.277286Z", "sources": [ { "generator": { "name": "artman", - "version": "0.35.1", - "dockerImage": "googleapis/artman@sha256:b11c7ea0d0831c54016fb50f4b796d24d1971439b30fbc32a369ba1ac887c384" + "version": "0.37.1", + "dockerImage": "googleapis/artman@sha256:6068f67900a3f0bdece596b97bda8fc70406ca0e137a941f4c81d3217c994a80" + } + }, + { + "git": { + "name": "googleapis", + "remote": "https://github.com/googleapis/googleapis.git", + "sha": "ce3c574d1266026cebea3a893247790bd68191c2", + "internalRef": "272147209" } }, { diff --git a/language/synth.py b/language/synth.py index bee382de3f39..db33996ab084 100644 --- a/language/synth.py +++ b/language/synth.py @@ -45,7 +45,7 @@ # Add templated files # ---------------------------------------------------------------------------- templated_files 
= common.py_library(unit_cov_level=97, cov_level=100) -s.move(templated_files) +s.move(templated_files, excludes=['noxfile.py']) s.replace( f"google/cloud/**/gapic/language_service_client.py", diff --git a/logging/CHANGELOG.md b/logging/CHANGELOG.md index f7ad1b7451ed..05caf8d580b8 100644 --- a/logging/CHANGELOG.md +++ b/logging/CHANGELOG.md @@ -4,6 +4,35 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +## 1.14.0 + +10-15-2019 06:50 PDT + + +### Implementation Changes +- Fix proto copy. ([#9420](https://github.com/googleapis/google-cloud-python/pull/9420)) + +### Dependencies +- Pin 'google-cloud-core >= 1.0.3, < 2.0.0dev'. ([#9445](https://github.com/googleapis/google-cloud-python/pull/9445)) + +## 1.13.0 + +09-23-2019 10:00 PDT + +### Implementation Changes +- Pass 'stream' argument to super in 'ContainerEngineHandler.__init__'. ([#9166](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/9166)) + +### New Features +- Add LoggingV2Servicer, LogSinks, logging_metrics, and log_entry. Add LogSeverity and HttpRequest types (via synth). ([#9262](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/9262)) +- Add client_options to logging v1 ([#9046](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/9046)) + +### Documentation +- Remove compatability badges from READMEs. ([#9035](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/9035)) + +### Internal / Testing Changes +- Docs: Remove CI for gh-pages, use googleapis.dev for api_core refs. ([#9085](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/9085)) +- Delete custom synth removing gRPC send/recv msg size limits. 
([#8939](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/8939)) + ## 1.12.1 08-01-2019 09:45 PDT diff --git a/logging/docs/conf.py b/logging/docs/conf.py index da2faa63ec4f..524c564a1698 100644 --- a/logging/docs/conf.py +++ b/logging/docs/conf.py @@ -342,7 +342,7 @@ "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://2.python-requests.org/en/master/", None), + "requests": ("https://requests.kennethreitz.org/en/stable/", None), } diff --git a/logging/docs/gapic/v2/api.rst b/logging/docs/gapic/v2/api.rst new file mode 100644 index 000000000000..2dc6bf6fcc6b --- /dev/null +++ b/logging/docs/gapic/v2/api.rst @@ -0,0 +1,6 @@ +Client for Stackdriver Logging API +================================== + +.. automodule:: google.cloud.logging_v2 + :members: + :inherited-members: \ No newline at end of file diff --git a/logging/docs/gapic/v2/types.rst b/logging/docs/gapic/v2/types.rst new file mode 100644 index 000000000000..5521d4f9bc12 --- /dev/null +++ b/logging/docs/gapic/v2/types.rst @@ -0,0 +1,5 @@ +Types for Stackdriver Logging API Client +======================================== + +.. automodule:: google.cloud.logging_v2.types + :members: \ No newline at end of file diff --git a/logging/docs/index.rst b/logging/docs/index.rst index 67ad362dfc69..f617201a90ab 100644 --- a/logging/docs/index.rst +++ b/logging/docs/index.rst @@ -1,29 +1,12 @@ .. include:: README.rst -Usage Documentation +Documentation ------------------- .. toctree:: - :maxdepth: 2 - - usage - -Api Reference -------------- -.. 
toctree:: - :maxdepth: 2 + :maxdepth: 3 - client - logger - entries - metric - sink - stdlib-usage - handlers - handlers-app-engine - handlers-container-engine - transports-sync - transports-thread - transports-base + v1 + v2 Changelog ~~~~~~~~~ diff --git a/logging/docs/usage.rst b/logging/docs/usage.rst index 122a850fecba..f5662bcbaa08 100644 --- a/logging/docs/usage.rst +++ b/logging/docs/usage.rst @@ -1,3 +1,6 @@ +Usage Guide +=========== + Writing log entries ------------------- diff --git a/logging/docs/v1.rst b/logging/docs/v1.rst new file mode 100644 index 000000000000..f4f79d377a65 --- /dev/null +++ b/logging/docs/v1.rst @@ -0,0 +1,18 @@ +v1 +============== +.. toctree:: + :maxdepth: 2 + + usage + client + logger + entries + metric + sink + stdlib-usage + handlers + handlers-app-engine + handlers-container-engine + transports-sync + transports-thread + transports-base \ No newline at end of file diff --git a/logging/docs/v2.rst b/logging/docs/v2.rst new file mode 100644 index 000000000000..8dfc18b48171 --- /dev/null +++ b/logging/docs/v2.rst @@ -0,0 +1,7 @@ +v2 +---------------- +.. 
toctree:: + :maxdepth: 2 + + gapic/v2/api + gapic/v2/types \ No newline at end of file diff --git a/logging/google/cloud/logging_v2/gapic/config_service_v2_client.py b/logging/google/cloud/logging_v2/gapic/config_service_v2_client.py index df942566a397..18ed3c277435 100644 --- a/logging/google/cloud/logging_v2/gapic/config_service_v2_client.py +++ b/logging/google/cloud/logging_v2/gapic/config_service_v2_client.py @@ -31,27 +31,20 @@ import google.api_core.path_template import grpc -from google.api import monitored_resource_pb2 from google.cloud.logging_v2.gapic import config_service_v2_client_config from google.cloud.logging_v2.gapic import enums from google.cloud.logging_v2.gapic.transports import config_service_v2_grpc_transport -from google.cloud.logging_v2.proto import log_entry_pb2 from google.cloud.logging_v2.proto import logging_config_pb2 from google.cloud.logging_v2.proto import logging_config_pb2_grpc -from google.cloud.logging_v2.proto import logging_pb2 -from google.cloud.logging_v2.proto import logging_pb2_grpc from google.protobuf import empty_pb2 from google.protobuf import field_mask_pb2 -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-logging").version +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-logging",).version class ConfigServiceV2Client(object): - """ - Service for configuring sinks used to export log entries out of - Logging. 
- """ + """Service for configuring sinks used to route log entries.""" SERVICE_ADDRESS = "logging.googleapis.com:443" """The default address of the service.""" @@ -84,7 +77,7 @@ def from_service_account_file(cls, filename, *args, **kwargs): def billing_path(cls, billing_account): """Return a fully-qualified billing string.""" return google.api_core.path_template.expand( - "billingAccounts/{billing_account}", billing_account=billing_account + "billingAccounts/{billing_account}", billing_account=billing_account, ) @classmethod @@ -117,7 +110,7 @@ def exclusion_path(cls, project, exclusion): @classmethod def folder_path(cls, folder): """Return a fully-qualified folder string.""" - return google.api_core.path_template.expand("folders/{folder}", folder=folder) + return google.api_core.path_template.expand("folders/{folder}", folder=folder,) @classmethod def folder_exclusion_path(cls, folder, exclusion): @@ -132,14 +125,14 @@ def folder_exclusion_path(cls, folder, exclusion): def folder_sink_path(cls, folder, sink): """Return a fully-qualified folder_sink string.""" return google.api_core.path_template.expand( - "folders/{folder}/sinks/{sink}", folder=folder, sink=sink + "folders/{folder}/sinks/{sink}", folder=folder, sink=sink, ) @classmethod def organization_path(cls, organization): """Return a fully-qualified organization string.""" return google.api_core.path_template.expand( - "organizations/{organization}", organization=organization + "organizations/{organization}", organization=organization, ) @classmethod @@ -164,14 +157,14 @@ def organization_sink_path(cls, organization, sink): def project_path(cls, project): """Return a fully-qualified project string.""" return google.api_core.path_template.expand( - "projects/{project}", project=project + "projects/{project}", project=project, ) @classmethod def sink_path(cls, project, sink): """Return a fully-qualified sink string.""" return google.api_core.path_template.expand( - "projects/{project}/sinks/{sink}", 
project=project, sink=sink + "projects/{project}/sinks/{sink}", project=project, sink=sink, ) def __init__( @@ -261,12 +254,12 @@ def __init__( self.transport = transport else: self.transport = config_service_v2_grpc_transport.ConfigServiceV2GrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials + address=api_endpoint, channel=channel, credentials=credentials, ) if client_info is None: client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION + gapic_version=_GAPIC_LIBRARY_VERSION, ) else: client_info.gapic_version = _GAPIC_LIBRARY_VERSION @@ -277,7 +270,7 @@ def __init__( # (Ordinarily, these are the defaults specified in the `*_config.py` # file next to this one.) self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME] + client_config["interfaces"][self._INTERFACE_NAME], ) # Save a dictionary of cached API call functions. @@ -367,7 +360,7 @@ def list_sinks( ) request = logging_config_pb2.ListSinksRequest( - parent=parent, page_size=page_size + parent=parent, page_size=page_size, ) if metadata is None: metadata = [] @@ -457,7 +450,7 @@ def get_sink( client_info=self._client_info, ) - request = logging_config_pb2.GetSinkRequest(sink_name=sink_name) + request = logging_config_pb2.GetSinkRequest(sink_name=sink_name,) if metadata is None: metadata = [] metadata = list(metadata) @@ -561,7 +554,7 @@ def create_sink( ) request = logging_config_pb2.CreateSinkRequest( - parent=parent, sink=sink, unique_writer_identity=unique_writer_identity + parent=parent, sink=sink, unique_writer_identity=unique_writer_identity, ) if metadata is None: metadata = [] @@ -593,8 +586,10 @@ def update_sink( """ Updates a sink. This method replaces the following fields in the existing sink with values from the new sink: ``destination``, and - ``filter``. 
The updated sink might also have a new ``writer_identity``; - see the ``unique_writer_identity`` field. + ``filter``. + + The updated sink might also have a new ``writer_identity``; see the + ``unique_writer_identity`` field. Example: >>> from google.cloud import logging_v2 @@ -625,11 +620,10 @@ def update_sink( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.logging_v2.types.LogSink` - unique_writer_identity (bool): Optional. See - `sinks.create `__ - for a description of this field. When updating a sink, the effect of - this field on the value of ``writer_identity`` in the updated sink - depends on both the old and new values of this field: + unique_writer_identity (bool): Optional. See ``sinks.create`` for a description of this field. When + updating a sink, the effect of this field on the value of + ``writer_identity`` in the updated sink depends on both the old and new + values of this field: - If the old and new values of this field are both false or both true, then there is no change to the sink's ``writer_identity``. 
@@ -765,7 +759,7 @@ def delete_sink( client_info=self._client_info, ) - request = logging_config_pb2.DeleteSinkRequest(sink_name=sink_name) + request = logging_config_pb2.DeleteSinkRequest(sink_name=sink_name,) if metadata is None: metadata = [] metadata = list(metadata) @@ -863,7 +857,7 @@ def list_exclusions( ) request = logging_config_pb2.ListExclusionsRequest( - parent=parent, page_size=page_size + parent=parent, page_size=page_size, ) if metadata is None: metadata = [] @@ -953,7 +947,7 @@ def get_exclusion( client_info=self._client_info, ) - request = logging_config_pb2.GetExclusionRequest(name=name) + request = logging_config_pb2.GetExclusionRequest(name=name,) if metadata is None: metadata = [] metadata = list(metadata) @@ -1044,7 +1038,7 @@ def create_exclusion( ) request = logging_config_pb2.CreateExclusionRequest( - parent=parent, exclusion=exclusion + parent=parent, exclusion=exclusion, ) if metadata is None: metadata = [] @@ -1106,10 +1100,11 @@ def update_exclusion( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.logging_v2.types.LogExclusion` - update_mask (Union[dict, ~google.cloud.logging_v2.types.FieldMask]): Required. A nonempty list of fields to change in the existing exclusion. - New values for the fields are taken from the corresponding fields in the - ``LogExclusion`` included in this request. Fields not mentioned in - ``update_mask`` are not changed and are ignored in the request. + update_mask (Union[dict, ~google.cloud.logging_v2.types.FieldMask]): Required. A non-empty list of fields to change in the existing + exclusion. New values for the fields are taken from the corresponding + fields in the ``LogExclusion`` included in this request. Fields not + mentioned in ``update_mask`` are not changed and are ignored in the + request. For example, to change the filter and description of an exclusion, specify an ``update_mask`` of ``"filter,description"``. 
@@ -1147,7 +1142,7 @@ def update_exclusion( ) request = logging_config_pb2.UpdateExclusionRequest( - name=name, exclusion=exclusion, update_mask=update_mask + name=name, exclusion=exclusion, update_mask=update_mask, ) if metadata is None: metadata = [] @@ -1223,7 +1218,7 @@ def delete_exclusion( client_info=self._client_info, ) - request = logging_config_pb2.DeleteExclusionRequest(name=name) + request = logging_config_pb2.DeleteExclusionRequest(name=name,) if metadata is None: metadata = [] metadata = list(metadata) diff --git a/logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py b/logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py index 22aa07c592ae..778ba747d83c 100644 --- a/logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py +++ b/logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py @@ -36,12 +36,15 @@ from google.cloud.logging_v2.gapic import logging_service_v2_client_config from google.cloud.logging_v2.gapic.transports import logging_service_v2_grpc_transport from google.cloud.logging_v2.proto import log_entry_pb2 +from google.cloud.logging_v2.proto import logging_config_pb2 +from google.cloud.logging_v2.proto import logging_config_pb2_grpc from google.cloud.logging_v2.proto import logging_pb2 from google.cloud.logging_v2.proto import logging_pb2_grpc from google.protobuf import empty_pb2 +from google.protobuf import field_mask_pb2 -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-logging").version +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-logging",).version class LoggingServiceV2Client(object): @@ -78,7 +81,7 @@ def from_service_account_file(cls, filename, *args, **kwargs): def billing_path(cls, billing_account): """Return a fully-qualified billing string.""" return google.api_core.path_template.expand( - "billingAccounts/{billing_account}", billing_account=billing_account + "billingAccounts/{billing_account}", billing_account=billing_account, ) 
@classmethod @@ -93,27 +96,27 @@ def billing_log_path(cls, billing_account, log): @classmethod def folder_path(cls, folder): """Return a fully-qualified folder string.""" - return google.api_core.path_template.expand("folders/{folder}", folder=folder) + return google.api_core.path_template.expand("folders/{folder}", folder=folder,) @classmethod def folder_log_path(cls, folder, log): """Return a fully-qualified folder_log string.""" return google.api_core.path_template.expand( - "folders/{folder}/logs/{log}", folder=folder, log=log + "folders/{folder}/logs/{log}", folder=folder, log=log, ) @classmethod def log_path(cls, project, log): """Return a fully-qualified log string.""" return google.api_core.path_template.expand( - "projects/{project}/logs/{log}", project=project, log=log + "projects/{project}/logs/{log}", project=project, log=log, ) @classmethod def organization_path(cls, organization): """Return a fully-qualified organization string.""" return google.api_core.path_template.expand( - "organizations/{organization}", organization=organization + "organizations/{organization}", organization=organization, ) @classmethod @@ -129,7 +132,7 @@ def organization_log_path(cls, organization, log): def project_path(cls, project): """Return a fully-qualified project string.""" return google.api_core.path_template.expand( - "projects/{project}", project=project + "projects/{project}", project=project, ) def __init__( @@ -219,12 +222,12 @@ def __init__( self.transport = transport else: self.transport = logging_service_v2_grpc_transport.LoggingServiceV2GrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials + address=api_endpoint, channel=channel, credentials=credentials, ) if client_info is None: client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION + gapic_version=_GAPIC_LIBRARY_VERSION, ) else: client_info.gapic_version = _GAPIC_LIBRARY_VERSION @@ -235,7 +238,7 @@ def __init__( # (Ordinarily, these 
are the defaults specified in the `*_config.py` # file next to this one.) self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME] + client_config["interfaces"][self._INTERFACE_NAME], ) # Save a dictionary of cached API call functions. @@ -308,7 +311,7 @@ def delete_log( client_info=self._client_info, ) - request = logging_pb2.DeleteLogRequest(log_name=log_name) + request = logging_pb2.DeleteLogRequest(log_name=log_name,) if metadata is None: metadata = [] metadata = list(metadata) @@ -374,8 +377,8 @@ def write_log_entries( Log entries with timestamps that are more than the `logs retention period `__ in the past or more than 24 hours in the future will not be available when calling - ``entries.list``. However, those log entries can still be exported with - `LogSinks `__. + ``entries.list``. However, those log entries can still be `exported with + LogSinks `__. To improve throughput and to avoid exceeding the `quota limit `__ for calls to @@ -486,8 +489,9 @@ def list_log_entries( metadata=None, ): """ - Lists log entries. Use this method to retrieve log entries from Logging. - For ways to export log entries, see `Exporting + Lists log entries. Use this method to retrieve log entries that + originated from a project/folder/organization/billing account. For ways + to export log entries, see `Exporting Logs `__. Example: @@ -526,9 +530,7 @@ def list_log_entries( Projects listed in the ``project_ids`` field are added to this list. project_ids (list[str]): Deprecated. Use ``resource_names`` instead. One or more project identifiers or project numbers from which to retrieve log entries. - Example: ``"my-project-1A"``. If present, these project identifiers are - converted to resource name format and added to the list of resources in - ``resource_names``. + Example: ``"my-project-1A"``. filter_ (str): Optional. A filter that chooses which log entries to return. See `Advanced Logs Filters `__. 
@@ -677,7 +679,7 @@ def list_monitored_resource_descriptors( ) request = logging_pb2.ListMonitoredResourceDescriptorsRequest( - page_size=page_size + page_size=page_size, ) iterator = google.api_core.page_iterator.GRPCIterator( client=None, @@ -774,7 +776,7 @@ def list_logs( client_info=self._client_info, ) - request = logging_pb2.ListLogsRequest(parent=parent, page_size=page_size) + request = logging_pb2.ListLogsRequest(parent=parent, page_size=page_size,) if metadata is None: metadata = [] metadata = list(metadata) diff --git a/logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py b/logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py index 59dae9c7a78f..278f1365153d 100644 --- a/logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py +++ b/logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py @@ -46,7 +46,7 @@ from google.protobuf import field_mask_pb2 -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-logging").version +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-logging",).version class MetricsServiceV2Client(object): @@ -83,33 +83,33 @@ def from_service_account_file(cls, filename, *args, **kwargs): def billing_path(cls, billing_account): """Return a fully-qualified billing string.""" return google.api_core.path_template.expand( - "billingAccounts/{billing_account}", billing_account=billing_account + "billingAccounts/{billing_account}", billing_account=billing_account, ) @classmethod def folder_path(cls, folder): """Return a fully-qualified folder string.""" - return google.api_core.path_template.expand("folders/{folder}", folder=folder) + return google.api_core.path_template.expand("folders/{folder}", folder=folder,) @classmethod def metric_path(cls, project, metric): """Return a fully-qualified metric string.""" return google.api_core.path_template.expand( - "projects/{project}/metrics/{metric}", project=project, metric=metric + 
"projects/{project}/metrics/{metric}", project=project, metric=metric, ) @classmethod def organization_path(cls, organization): """Return a fully-qualified organization string.""" return google.api_core.path_template.expand( - "organizations/{organization}", organization=organization + "organizations/{organization}", organization=organization, ) @classmethod def project_path(cls, project): """Return a fully-qualified project string.""" return google.api_core.path_template.expand( - "projects/{project}", project=project + "projects/{project}", project=project, ) def __init__( @@ -199,12 +199,12 @@ def __init__( self.transport = transport else: self.transport = metrics_service_v2_grpc_transport.MetricsServiceV2GrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials + address=api_endpoint, channel=channel, credentials=credentials, ) if client_info is None: client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION + gapic_version=_GAPIC_LIBRARY_VERSION, ) else: client_info.gapic_version = _GAPIC_LIBRARY_VERSION @@ -215,7 +215,7 @@ def __init__( # (Ordinarily, these are the defaults specified in the `*_config.py` # file next to this one.) self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME] + client_config["interfaces"][self._INTERFACE_NAME], ) # Save a dictionary of cached API call functions. 
@@ -302,7 +302,7 @@ def list_log_metrics( ) request = logging_metrics_pb2.ListLogMetricsRequest( - parent=parent, page_size=page_size + parent=parent, page_size=page_size, ) if metadata is None: metadata = [] @@ -387,7 +387,7 @@ def get_log_metric( client_info=self._client_info, ) - request = logging_metrics_pb2.GetLogMetricRequest(metric_name=metric_name) + request = logging_metrics_pb2.GetLogMetricRequest(metric_name=metric_name,) if metadata is None: metadata = [] metadata = list(metadata) @@ -472,7 +472,7 @@ def create_log_metric( ) request = logging_metrics_pb2.CreateLogMetricRequest( - parent=parent, metric=metric + parent=parent, metric=metric, ) if metadata is None: metadata = [] @@ -559,7 +559,7 @@ def update_log_metric( ) request = logging_metrics_pb2.UpdateLogMetricRequest( - metric_name=metric_name, metric=metric + metric_name=metric_name, metric=metric, ) if metadata is None: metadata = [] @@ -630,7 +630,7 @@ def delete_log_metric( client_info=self._client_info, ) - request = logging_metrics_pb2.DeleteLogMetricRequest(metric_name=metric_name) + request = logging_metrics_pb2.DeleteLogMetricRequest(metric_name=metric_name,) if metadata is None: metadata = [] metadata = list(metadata) diff --git a/logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py b/logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py index 8157764ec872..b85abcd58a78 100644 --- a/logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py +++ b/logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py @@ -59,7 +59,7 @@ def __init__( # exception (channels come with credentials baked in already). if channel is not None and credentials is not None: raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive." + "The `channel` and `credentials` arguments are mutually " "exclusive.", ) # Create the channel. 
@@ -80,7 +80,7 @@ def __init__( self._stubs = { "config_service_v2_stub": logging_config_pb2_grpc.ConfigServiceV2Stub( channel - ) + ), } @classmethod @@ -163,8 +163,10 @@ def update_sink(self): Updates a sink. This method replaces the following fields in the existing sink with values from the new sink: ``destination``, and - ``filter``. The updated sink might also have a new ``writer_identity``; - see the ``unique_writer_identity`` field. + ``filter``. + + The updated sink might also have a new ``writer_identity``; see the + ``unique_writer_identity`` field. Returns: Callable: A callable which accepts the appropriate diff --git a/logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py b/logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py index caa20c480225..f6ab3ab8876c 100644 --- a/logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py +++ b/logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py @@ -59,7 +59,7 @@ def __init__( # exception (channels come with credentials baked in already). if channel is not None and credentials is not None: raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive." + "The `channel` and `credentials` arguments are mutually " "exclusive.", ) # Create the channel. @@ -78,7 +78,7 @@ def __init__( # gRPC uses objects called "stubs" that are bound to the # channel and provide a basic method for each RPC. self._stubs = { - "logging_service_v2_stub": logging_pb2_grpc.LoggingServiceV2Stub(channel) + "logging_service_v2_stub": logging_pb2_grpc.LoggingServiceV2Stub(channel), } @classmethod @@ -152,8 +152,9 @@ def write_log_entries(self): def list_log_entries(self): """Return the gRPC stub for :meth:`LoggingServiceV2Client.list_log_entries`. - Lists log entries. Use this method to retrieve log entries from Logging. - For ways to export log entries, see `Exporting + Lists log entries. 
Use this method to retrieve log entries that + originated from a project/folder/organization/billing account. For ways + to export log entries, see `Exporting Logs `__. Returns: diff --git a/logging/google/cloud/logging_v2/gapic/transports/metrics_service_v2_grpc_transport.py b/logging/google/cloud/logging_v2/gapic/transports/metrics_service_v2_grpc_transport.py index 426edce6edd7..bc66722729bb 100644 --- a/logging/google/cloud/logging_v2/gapic/transports/metrics_service_v2_grpc_transport.py +++ b/logging/google/cloud/logging_v2/gapic/transports/metrics_service_v2_grpc_transport.py @@ -59,7 +59,7 @@ def __init__( # exception (channels come with credentials baked in already). if channel is not None and credentials is not None: raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive." + "The `channel` and `credentials` arguments are mutually " "exclusive.", ) # Create the channel. @@ -80,7 +80,7 @@ def __init__( self._stubs = { "metrics_service_v2_stub": logging_metrics_pb2_grpc.MetricsServiceV2Stub( channel - ) + ), } @classmethod diff --git a/logging/google/cloud/logging_v2/proto/http_request.proto b/logging/google/cloud/logging_v2/proto/http_request.proto deleted file mode 100644 index 21b1367ab8ca..000000000000 --- a/logging/google/cloud/logging_v2/proto/http_request.proto +++ /dev/null @@ -1,93 +0,0 @@ -// Copyright 2018 Google LLC. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
-// - -syntax = "proto3"; - -package google.logging.type; - -import "google/api/annotations.proto"; -import "google/protobuf/duration.proto"; - -option csharp_namespace = "Google.Cloud.Logging.Type"; -option go_package = "google.golang.org/genproto/googleapis/logging/type;ltype"; -option java_multiple_files = true; -option java_outer_classname = "HttpRequestProto"; -option java_package = "com.google.logging.type"; -option php_namespace = "Google\\Cloud\\Logging\\Type"; - - -// A common proto for logging HTTP requests. Only contains semantics -// defined by the HTTP specification. Product-specific logging -// information MUST be defined in a separate message. -message HttpRequest { - // The request method. Examples: `"GET"`, `"HEAD"`, `"PUT"`, `"POST"`. - string request_method = 1; - - // The scheme (http, https), the host name, the path and the query - // portion of the URL that was requested. - // Example: `"http://example.com/some/info?color=red"`. - string request_url = 2; - - // The size of the HTTP request message in bytes, including the request - // headers and the request body. - int64 request_size = 3; - - // The response code indicating the status of response. - // Examples: 200, 404. - int32 status = 4; - - // The size of the HTTP response message sent back to the client, in bytes, - // including the response headers and the response body. - int64 response_size = 5; - - // The user agent sent by the client. Example: - // `"Mozilla/4.0 (compatible; MSIE 6.0; Windows 98; Q312461; .NET CLR 1.0.3705)"`. - string user_agent = 6; - - // The IP address (IPv4 or IPv6) of the client that issued the HTTP - // request. Examples: `"192.168.1.1"`, `"FE80::0202:B3FF:FE1E:8329"`. - string remote_ip = 7; - - // The IP address (IPv4 or IPv6) of the origin server that the request was - // sent to. - string server_ip = 13; - - // The referer URL of the request, as defined in - // [HTTP/1.1 Header Field Definitions](http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html). 
- string referer = 8; - - // The request processing latency on the server, from the time the request was - // received until the response was sent. - google.protobuf.Duration latency = 14; - - // Whether or not a cache lookup was attempted. - bool cache_lookup = 11; - - // Whether or not an entity was served from cache - // (with or without validation). - bool cache_hit = 9; - - // Whether or not the response was validated with the origin server before - // being served from cache. This field is only meaningful if `cache_hit` is - // True. - bool cache_validated_with_origin_server = 10; - - // The number of HTTP response bytes inserted into cache. Set only when a - // cache fill was attempted. - int64 cache_fill_bytes = 12; - - // Protocol used for the request. Examples: "HTTP/1.1", "HTTP/2", "websocket" - string protocol = 15; -} diff --git a/logging/google/cloud/logging_v2/proto/log_entry.proto b/logging/google/cloud/logging_v2/proto/log_entry.proto index de9786daf733..f0b037545199 100644 --- a/logging/google/cloud/logging_v2/proto/log_entry.proto +++ b/logging/google/cloud/logging_v2/proto/log_entry.proto @@ -1,4 +1,4 @@ -// Copyright 2018 Google LLC. +// Copyright 2019 Google LLC. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -17,13 +17,14 @@ syntax = "proto3"; package google.logging.v2; -import "google/api/annotations.proto"; import "google/api/monitored_resource.proto"; import "google/logging/type/http_request.proto"; import "google/logging/type/log_severity.proto"; import "google/protobuf/any.proto"; import "google/protobuf/struct.proto"; import "google/protobuf/timestamp.proto"; +import "google/rpc/status.proto"; +import "google/api/annotations.proto"; option cc_enable_arenas = true; option csharp_namespace = "Google.Cloud.Logging.V2"; @@ -34,6 +35,7 @@ option java_package = "com.google.logging.v2"; option php_namespace = "Google\\Cloud\\Logging\\V2"; // An individual entry in a log. +// message LogEntry { // Required. The resource name of the log to which this log entry belongs: // @@ -42,9 +44,9 @@ message LogEntry { // "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" // "folders/[FOLDER_ID]/logs/[LOG_ID]" // - // A project number may optionally be used in place of PROJECT_ID. The - // project number is translated to its corresponding PROJECT_ID internally - // and the `log_name` field will contain PROJECT_ID in queries and exports. + // A project number may optionally be used in place of PROJECT_ID. The project + // number is translated to its corresponding PROJECT_ID internally and the + // `log_name` field will contain PROJECT_ID in queries and exports. // // `[LOG_ID]` must be URL-encoded within `log_name`. Example: // `"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity"`. @@ -59,17 +61,23 @@ message LogEntry { // any results. string log_name = 12; - // Required. The primary monitored resource associated with this log entry. - // Example: a log entry that reports a database error would be - // associated with the monitored resource designating the particular - // database that reported the error. + // Required. The monitored resource that produced this log entry. 
+ // + // Example: a log entry that reports a database error would be associated with + // the monitored resource designating the particular database that reported + // the error. google.api.MonitoredResource resource = 8; // Optional. The log entry payload, which can be one of multiple types. oneof payload { - // The log entry payload, represented as a protocol buffer. Some - // Google Cloud Platform services use this field for their log - // entry payloads. + // The log entry payload, represented as a protocol buffer. Some Google + // Cloud Platform services use this field for their log entry payloads. + // + // The following protocol buffer types are supported; user-defined types + // are not supported: + // + // "type.googleapis.com/google.cloud.audit.AuditLog" + // "type.googleapis.com/google.appengine.logging.v1.RequestLog" google.protobuf.Any proto_payload = 2; // The log entry payload, represented as a Unicode string (UTF-8). @@ -80,19 +88,18 @@ message LogEntry { google.protobuf.Struct json_payload = 6; } - // Optional. The time the event described by the log entry occurred. - // This time is used to compute the log entry's age and to enforce - // the logs retention period. If this field is omitted in a new log - // entry, then Logging assigns it the current time. - // Timestamps have nanosecond accuracy, but trailing zeros in the fractional - // seconds might be omitted when the timestamp is displayed. + // Optional. The time the event described by the log entry occurred. This + // time is used to compute the log entry's age and to enforce the logs + // retention period. If this field is omitted in a new log entry, then Logging + // assigns it the current time. Timestamps have nanosecond accuracy, but + // trailing zeros in the fractional seconds might be omitted when the + // timestamp is displayed. 
// - // Incoming log entries should have timestamps that are no more than - // the [logs retention period](/logging/quotas) in the past, - // and no more than 24 hours in the future. Log entries outside those time - // boundaries will not be available when calling `entries.list`, but - // those log entries can still be exported with - // [LogSinks](/logging/docs/api/tasks/exporting-logs). + // Incoming log entries should have timestamps that are no more than the [logs + // retention period](/logging/quotas) in the past, and no more than 24 hours + // in the future. Log entries outside those time boundaries will not be + // available when calling `entries.list`, but those log entries can still be + // [exported with LogSinks](/logging/docs/api/tasks/exporting-logs). google.protobuf.Timestamp timestamp = 9; // Output only. The time the log entry was received by Logging. @@ -103,25 +110,31 @@ message LogEntry { google.logging.type.LogSeverity severity = 10; // Optional. A unique identifier for the log entry. If you provide a value, - // then Logging considers other log entries in the same project, - // with the same `timestamp`, and with the same `insert_id` to be duplicates - // which can be removed. If omitted in new log entries, then - // Logging assigns its own unique identifier. The `insert_id` is also used - // to order log entries that have the same `timestamp` value. + // then Logging considers other log entries in the same project, with the same + // `timestamp`, and with the same `insert_id` to be duplicates which can be + // removed. If omitted in new log entries, then Logging assigns its own unique + // identifier. The `insert_id` is also used to order log entries that have the + // same `timestamp` value. string insert_id = 4; - // Optional. Information about the HTTP request associated with this - // log entry, if applicable. + // Optional. Information about the HTTP request associated with this log + // entry, if applicable. 
google.logging.type.HttpRequest http_request = 7; // Optional. A set of user-defined (key, value) data that provides additional // information about the log entry. map labels = 11; - // Output only. Additional metadata about the monitored resource. + // Deprecated. Output only. Additional metadata about the monitored resource. + // // Only `k8s_container`, `k8s_pod`, and `k8s_node` MonitoredResources have - // this field populated. - google.api.MonitoredResourceMetadata metadata = 25; + // this field populated for GKE versions older than 1.12.6. For GKE versions + // 1.12.6 and above, the `metadata` field has been deprecated. The Kubernetes + // pod labels that used to be in `metadata.userLabels` will now be present in + // the `labels` field with a key prefix of `k8s-pod/`. The Stackdriver system + // labels that were present in the `metadata.systemLabels` field will no + // longer be available in the LogEntry. + google.api.MonitoredResourceMetadata metadata = 25 [deprecated = true]; // Optional. Information about an operation associated with the log entry, if // applicable. @@ -134,12 +147,14 @@ message LogEntry { string trace = 22; // Optional. The span ID within the trace associated with the log entry. - // For Trace spans, this is the same format that the Trace - // API v2 uses: a 16-character hexadecimal encoding of an 8-byte array, such - // as "000000000000004a". + // + // For Trace spans, this is the same format that the Trace API v2 uses: a + // 16-character hexadecimal encoding of an 8-byte array, such as + // "000000000000004a". string span_id = 27; // Optional. The sampling decision of the trace associated with the log entry. + // // True means that the trace resource name in the `trace` field was sampled // for storage in a trace backend. 
False means that the trace was not sampled // for storage when this log entry was written, or the sampling decision was @@ -155,12 +170,12 @@ message LogEntry { // Additional information about a potentially long-running operation with which // a log entry is associated. message LogEntryOperation { - // Optional. An arbitrary operation identifier. Log entries with the - // same identifier are assumed to be part of the same operation. + // Optional. An arbitrary operation identifier. Log entries with the same + // identifier are assumed to be part of the same operation. string id = 1; - // Optional. An arbitrary producer identifier. The combination of - // `id` and `producer` must be globally unique. Examples for `producer`: + // Optional. An arbitrary producer identifier. The combination of `id` and + // `producer` must be globally unique. Examples for `producer`: // `"MyDivision.MyBigCompany.com"`, `"github.com/MyProject/MyApplication"`. string producer = 2; diff --git a/logging/google/cloud/logging_v2/proto/log_entry_pb2.py b/logging/google/cloud/logging_v2/proto/log_entry_pb2.py index 6dc9ec5817f1..c2517d84adae 100644 --- a/logging/google/cloud/logging_v2/proto/log_entry_pb2.py +++ b/logging/google/cloud/logging_v2/proto/log_entry_pb2.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- -# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! 
# source: google/cloud/logging_v2/proto/log_entry.proto @@ -16,7 +15,6 @@ _sym_db = _symbol_database.Default() -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 from google.api import ( monitored_resource_pb2 as google_dot_api_dot_monitored__resource__pb2, ) @@ -29,6 +27,8 @@ from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2 +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 DESCRIPTOR = _descriptor.FileDescriptor( @@ -39,16 +39,17 @@ "\n\025com.google.logging.v2B\rLogEntryProtoP\001Z8google.golang.org/genproto/googleapis/logging/v2;logging\370\001\001\252\002\027Google.Cloud.Logging.V2\312\002\027Google\\Cloud\\Logging\\V2" ), serialized_pb=_b( - '\n-google/cloud/logging_v2/proto/log_entry.proto\x12\x11google.logging.v2\x1a\x1cgoogle/api/annotations.proto\x1a#google/api/monitored_resource.proto\x1a&google/logging/type/http_request.proto\x1a&google/logging/type/log_severity.proto\x1a\x19google/protobuf/any.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\x8a\x06\n\x08LogEntry\x12\x10\n\x08log_name\x18\x0c \x01(\t\x12/\n\x08resource\x18\x08 \x01(\x0b\x32\x1d.google.api.MonitoredResource\x12-\n\rproto_payload\x18\x02 \x01(\x0b\x32\x14.google.protobuf.AnyH\x00\x12\x16\n\x0ctext_payload\x18\x03 \x01(\tH\x00\x12/\n\x0cjson_payload\x18\x06 \x01(\x0b\x32\x17.google.protobuf.StructH\x00\x12-\n\ttimestamp\x18\t \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x35\n\x11receive_timestamp\x18\x18 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x32\n\x08severity\x18\n \x01(\x0e\x32 .google.logging.type.LogSeverity\x12\x11\n\tinsert_id\x18\x04 \x01(\t\x12\x36\n\x0chttp_request\x18\x07 \x01(\x0b\x32 
.google.logging.type.HttpRequest\x12\x37\n\x06labels\x18\x0b \x03(\x0b\x32\'.google.logging.v2.LogEntry.LabelsEntry\x12\x37\n\x08metadata\x18\x19 \x01(\x0b\x32%.google.api.MonitoredResourceMetadata\x12\x37\n\toperation\x18\x0f \x01(\x0b\x32$.google.logging.v2.LogEntryOperation\x12\r\n\x05trace\x18\x16 \x01(\t\x12\x0f\n\x07span_id\x18\x1b \x01(\t\x12\x15\n\rtrace_sampled\x18\x1e \x01(\x08\x12\x42\n\x0fsource_location\x18\x17 \x01(\x0b\x32).google.logging.v2.LogEntrySourceLocation\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07payload"N\n\x11LogEntryOperation\x12\n\n\x02id\x18\x01 \x01(\t\x12\x10\n\x08producer\x18\x02 \x01(\t\x12\r\n\x05\x66irst\x18\x03 \x01(\x08\x12\x0c\n\x04last\x18\x04 \x01(\x08"F\n\x16LogEntrySourceLocation\x12\x0c\n\x04\x66ile\x18\x01 \x01(\t\x12\x0c\n\x04line\x18\x02 \x01(\x03\x12\x10\n\x08\x66unction\x18\x03 \x01(\tB\x99\x01\n\x15\x63om.google.logging.v2B\rLogEntryProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' + '\n-google/cloud/logging_v2/proto/log_entry.proto\x12\x11google.logging.v2\x1a#google/api/monitored_resource.proto\x1a&google/logging/type/http_request.proto\x1a&google/logging/type/log_severity.proto\x1a\x19google/protobuf/any.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto\x1a\x1cgoogle/api/annotations.proto"\x8e\x06\n\x08LogEntry\x12\x10\n\x08log_name\x18\x0c \x01(\t\x12/\n\x08resource\x18\x08 \x01(\x0b\x32\x1d.google.api.MonitoredResource\x12-\n\rproto_payload\x18\x02 \x01(\x0b\x32\x14.google.protobuf.AnyH\x00\x12\x16\n\x0ctext_payload\x18\x03 \x01(\tH\x00\x12/\n\x0cjson_payload\x18\x06 \x01(\x0b\x32\x17.google.protobuf.StructH\x00\x12-\n\ttimestamp\x18\t \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x35\n\x11receive_timestamp\x18\x18 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x32\n\x08severity\x18\n \x01(\x0e\x32 .google.logging.type.LogSeverity\x12\x11\n\tinsert_id\x18\x04 \x01(\t\x12\x36\n\x0chttp_request\x18\x07 \x01(\x0b\x32 .google.logging.type.HttpRequest\x12\x37\n\x06labels\x18\x0b \x03(\x0b\x32\'.google.logging.v2.LogEntry.LabelsEntry\x12;\n\x08metadata\x18\x19 \x01(\x0b\x32%.google.api.MonitoredResourceMetadataB\x02\x18\x01\x12\x37\n\toperation\x18\x0f \x01(\x0b\x32$.google.logging.v2.LogEntryOperation\x12\r\n\x05trace\x18\x16 \x01(\t\x12\x0f\n\x07span_id\x18\x1b \x01(\t\x12\x15\n\rtrace_sampled\x18\x1e \x01(\x08\x12\x42\n\x0fsource_location\x18\x17 \x01(\x0b\x32).google.logging.v2.LogEntrySourceLocation\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07payload"N\n\x11LogEntryOperation\x12\n\n\x02id\x18\x01 \x01(\t\x12\x10\n\x08producer\x18\x02 \x01(\t\x12\r\n\x05\x66irst\x18\x03 \x01(\x08\x12\x0c\n\x04last\x18\x04 \x01(\x08"F\n\x16LogEntrySourceLocation\x12\x0c\n\x04\x66ile\x18\x01 \x01(\t\x12\x0c\n\x04line\x18\x02 \x01(\x03\x12\x10\n\x08\x66unction\x18\x03 \x01(\tB\x99\x01\n\x15\x63om.google.logging.v2B\rLogEntryProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' ), dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, google_dot_api_dot_monitored__resource__pb2.DESCRIPTOR, google_dot_logging_dot_type_dot_http__request__pb2.DESCRIPTOR, google_dot_logging_dot_type_dot_log__severity__pb2.DESCRIPTOR, google_dot_protobuf_dot_any__pb2.DESCRIPTOR, google_dot_protobuf_dot_struct__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, + google_dot_rpc_dot_status__pb2.DESCRIPTOR, + google_dot_api_dot_annotations__pb2.DESCRIPTOR, ], ) @@ -105,8 +106,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1028, - serialized_end=1073, + serialized_start=1057, + 
serialized_end=1102, ) _LOGENTRY = _descriptor.Descriptor( @@ -329,7 +330,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\030\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -424,7 +425,7 @@ ), ], extensions=[], - nested_types=[_LOGENTRY_LABELSENTRY], + nested_types=[_LOGENTRY_LABELSENTRY,], enum_types=[], serialized_options=None, is_extendable=False, @@ -437,10 +438,10 @@ index=0, containing_type=None, fields=[], - ) + ), ], - serialized_start=306, - serialized_end=1084, + serialized_start=331, + serialized_end=1113, ) @@ -532,8 +533,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1086, - serialized_end=1164, + serialized_start=1115, + serialized_end=1193, ) @@ -607,8 +608,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1166, - serialized_end=1236, + serialized_start=1195, + serialized_end=1265, ) _LOGENTRY_LABELSENTRY.containing_type = _LOGENTRY @@ -703,17 +704,20 @@ for a log name with a leading slash will never return any results. resource: - Required. The primary monitored resource associated with this - log entry. Example: a log entry that reports a database error - would be associated with the monitored resource designating - the particular database that reported the error. + Required. The monitored resource that produced this log entry. + Example: a log entry that reports a database error would be + associated with the monitored resource designating the + particular database that reported the error. payload: Optional. The log entry payload, which can be one of multiple types. proto_payload: The log entry payload, represented as a protocol buffer. Some Google Cloud Platform services use this field for their log - entry payloads. + entry payloads. 
The following protocol buffer types are + supported; user-defined types are not supported: + "type.googleapis.com/google.cloud.audit.AuditLog" + "type.googleapis.com/google.appengine.logging.v1.RequestLog" text_payload: The log entry payload, represented as a Unicode string (UTF-8). @@ -732,7 +736,7 @@ `__ in the past, and no more than 24 hours in the future. Log entries outside those time boundaries will not be available when calling ``entries.list``, but those log - entries can still be exported with `LogSinks + entries can still be `exported with LogSinks `__. receive_timestamp: Output only. The time the log entry was received by Logging. @@ -754,9 +758,17 @@ Optional. A set of user-defined (key, value) data that provides additional information about the log entry. metadata: - Output only. Additional metadata about the monitored resource. - Only ``k8s_container``, ``k8s_pod``, and ``k8s_node`` - MonitoredResources have this field populated. + Deprecated. Output only. Additional metadata about the + monitored resource. Only ``k8s_container``, ``k8s_pod``, and + ``k8s_node`` MonitoredResources have this field populated for + GKE versions older than 1.12.6. For GKE versions 1.12.6 and + above, the ``metadata`` field has been deprecated. The + Kubernetes pod labels that used to be in + ``metadata.userLabels`` will now be present in the ``labels`` + field with a key prefix of ``k8s-pod/``. The Stackdriver + system labels that were present in the + ``metadata.systemLabels`` field will no longer be available in + the LogEntry. operation: Optional. Information about an operation associated with the log entry, if applicable. @@ -768,12 +780,12 @@ projectid/traces/06796866738c859f2f19b7cfb3214824`` span_id: Optional. The span ID within the trace associated with the log - entry. For Trace spans, this is the same format that the Trace - API v2 uses: a 16-character hexadecimal encoding of an 8-byte - array, such as "000000000000004a". + entry. 
For Trace spans, this is the same format that the + Trace API v2 uses: a 16-character hexadecimal encoding of an + 8-byte array, such as "000000000000004a". trace_sampled: Optional. The sampling decision of the trace associated with - the log entry. True means that the trace resource name in the + the log entry. True means that the trace resource name in the ``trace`` field was sampled for storage in a trace backend. False means that the trace was not sampled for storage when this log entry was written, or the sampling decision was @@ -857,4 +869,5 @@ DESCRIPTOR._options = None _LOGENTRY_LABELSENTRY._options = None +_LOGENTRY.fields_by_name["metadata"]._options = None # @@protoc_insertion_point(module_scope) diff --git a/logging/google/cloud/logging_v2/proto/log_severity.proto b/logging/google/cloud/logging_v2/proto/log_severity.proto deleted file mode 100644 index ccb08cacb445..000000000000 --- a/logging/google/cloud/logging_v2/proto/log_severity.proto +++ /dev/null @@ -1,73 +0,0 @@ -// Copyright 2018 Google LLC. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
-// - -syntax = "proto3"; - -package google.logging.type; - -import "google/api/annotations.proto"; - -option csharp_namespace = "Google.Cloud.Logging.Type"; -option go_package = "google.golang.org/genproto/googleapis/logging/type;ltype"; -option java_multiple_files = true; -option java_outer_classname = "LogSeverityProto"; -option java_package = "com.google.logging.type"; -option php_namespace = "Google\\Cloud\\Logging\\Type"; - - -// The severity of the event described in a log entry, expressed as one of the -// standard severity levels listed below. For your reference, the levels are -// assigned the listed numeric values. The effect of using numeric values other -// than those listed is undefined. -// -// You can filter for log entries by severity. For example, the following -// filter expression will match log entries with severities `INFO`, `NOTICE`, -// and `WARNING`: -// -// severity > DEBUG AND severity <= WARNING -// -// If you are writing log entries, you should map other severity encodings to -// one of these standard levels. For example, you might map all of Java's FINE, -// FINER, and FINEST levels to `LogSeverity.DEBUG`. You can preserve the -// original severity level in the log entry payload if you wish. -enum LogSeverity { - // (0) The log entry has no assigned severity level. - DEFAULT = 0; - - // (100) Debug or trace information. - DEBUG = 100; - - // (200) Routine information, such as ongoing status or performance. - INFO = 200; - - // (300) Normal but significant events, such as start up, shut down, or - // a configuration change. - NOTICE = 300; - - // (400) Warning events might cause problems. - WARNING = 400; - - // (500) Error events are likely to cause problems. - ERROR = 500; - - // (600) Critical events cause more severe problems or outages. - CRITICAL = 600; - - // (700) A person must take an action immediately. - ALERT = 700; - - // (800) One or more systems are unusable. 
- EMERGENCY = 800; -} diff --git a/logging/google/cloud/logging_v2/proto/logging.proto b/logging/google/cloud/logging_v2/proto/logging.proto index d04cd5c03dd0..fc4217593770 100644 --- a/logging/google/cloud/logging_v2/proto/logging.proto +++ b/logging/google/cloud/logging_v2/proto/logging.proto @@ -1,4 +1,4 @@ -// Copyright 2018 Google LLC. +// Copyright 2019 Google LLC. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -17,13 +17,15 @@ syntax = "proto3"; package google.logging.v2; -import "google/api/annotations.proto"; import "google/api/monitored_resource.proto"; import "google/logging/v2/log_entry.proto"; +import "google/logging/v2/logging_config.proto"; import "google/protobuf/duration.proto"; import "google/protobuf/empty.proto"; import "google/protobuf/timestamp.proto"; import "google/rpc/status.proto"; +import "google/api/annotations.proto"; +import "google/api/client.proto"; option cc_enable_arenas = true; option csharp_namespace = "Google.Cloud.Logging.V2"; @@ -35,6 +37,14 @@ option php_namespace = "Google\\Cloud\\Logging\\V2"; // Service for ingesting and querying logs. service LoggingServiceV2 { + option (google.api.default_host) = "logging.googleapis.com"; + option (google.api.oauth_scopes) = + "https://www.googleapis.com/auth/cloud-platform," + "https://www.googleapis.com/auth/cloud-platform.read-only," + "https://www.googleapis.com/auth/logging.admin," + "https://www.googleapis.com/auth/logging.read," + "https://www.googleapis.com/auth/logging.write"; + // Deletes all the log entries in a log. // The log reappears if it receives new entries. 
// Log entries written shortly before the delete operation might not be @@ -42,9 +52,18 @@ service LoggingServiceV2 { rpc DeleteLog(DeleteLogRequest) returns (google.protobuf.Empty) { option (google.api.http) = { delete: "/v2/{log_name=projects/*/logs/*}" - additional_bindings { delete: "/v2/{log_name=organizations/*/logs/*}" } - additional_bindings { delete: "/v2/{log_name=folders/*/logs/*}" } - additional_bindings { delete: "/v2/{log_name=billingAccounts/*/logs/*}" } + additional_bindings { + delete: "/v2/{log_name=*/*/logs/*}" + } + additional_bindings { + delete: "/v2/{log_name=organizations/*/logs/*}" + } + additional_bindings { + delete: "/v2/{log_name=folders/*/logs/*}" + } + additional_bindings { + delete: "/v2/{log_name=billingAccounts/*/logs/*}" + } }; } @@ -55,17 +74,16 @@ service LoggingServiceV2 { // A single request may contain log entries for a maximum of 1000 // different resources (projects, organizations, billing accounts or // folders) - rpc WriteLogEntries(WriteLogEntriesRequest) - returns (WriteLogEntriesResponse) { + rpc WriteLogEntries(WriteLogEntriesRequest) returns (WriteLogEntriesResponse) { option (google.api.http) = { post: "/v2/entries:write" body: "*" }; } - // Lists log entries. Use this method to retrieve log entries from - // Logging. For ways to export log entries, see - // [Exporting Logs](/logging/docs/export). + // Lists log entries. Use this method to retrieve log entries that originated + // from a project/folder/organization/billing account. For ways to export log + // entries, see [Exporting Logs](/logging/docs/export). rpc ListLogEntries(ListLogEntriesRequest) returns (ListLogEntriesResponse) { option (google.api.http) = { post: "/v2/entries:list" @@ -74,8 +92,7 @@ service LoggingServiceV2 { } // Lists the descriptors for monitored resource types used by Logging. 
- rpc ListMonitoredResourceDescriptors(ListMonitoredResourceDescriptorsRequest) - returns (ListMonitoredResourceDescriptorsResponse) { + rpc ListMonitoredResourceDescriptors(ListMonitoredResourceDescriptorsRequest) returns (ListMonitoredResourceDescriptorsResponse) { option (google.api.http) = { get: "/v2/monitoredResourceDescriptors" }; @@ -86,10 +103,18 @@ service LoggingServiceV2 { rpc ListLogs(ListLogsRequest) returns (ListLogsResponse) { option (google.api.http) = { get: "/v2/{parent=*/*}/logs" - additional_bindings { get: "/v2/{parent=projects/*}/logs" } - additional_bindings { get: "/v2/{parent=organizations/*}/logs" } - additional_bindings { get: "/v2/{parent=folders/*}/logs" } - additional_bindings { get: "/v2/{parent=billingAccounts/*}/logs" } + additional_bindings { + get: "/v2/{parent=projects/*}/logs" + } + additional_bindings { + get: "/v2/{parent=organizations/*}/logs" + } + additional_bindings { + get: "/v2/{parent=folders/*}/logs" + } + additional_bindings { + get: "/v2/{parent=billingAccounts/*}/logs" + } }; } } @@ -164,8 +189,8 @@ message WriteLogEntriesRequest { // Log entries with timestamps that are more than the // [logs retention period](/logging/quota-policy) in the past or more than // 24 hours in the future will not be available when calling `entries.list`. - // However, those log entries can still be exported with - // [LogSinks](/logging/docs/api/tasks/exporting-logs). + // However, those log entries can still be + // [exported with LogSinks](/logging/docs/api/tasks/exporting-logs). // // To improve throughput and to avoid exceeding the // [quota limit](/logging/quota-policy) for calls to `entries.write`, @@ -188,7 +213,9 @@ message WriteLogEntriesRequest { // Result returned from WriteLogEntries. // empty -message WriteLogEntriesResponse {} +message WriteLogEntriesResponse { + +} // Error details for WriteLogEntries with partial success. 
message WriteLogEntriesPartialErrors { @@ -205,9 +232,7 @@ message WriteLogEntriesPartialErrors { message ListLogEntriesRequest { // Deprecated. Use `resource_names` instead. One or more project identifiers // or project numbers from which to retrieve log entries. Example: - // `"my-project-1A"`. If present, these project identifiers are converted to - // resource name format and added to the list of resources in - // `resource_names`. + // `"my-project-1A"`. repeated string project_ids = 1 [deprecated = true]; // Required. Names of one or more parent resources from which to @@ -218,6 +243,7 @@ message ListLogEntriesRequest { // "billingAccounts/[BILLING_ACCOUNT_ID]" // "folders/[FOLDER_ID]" // + // // Projects listed in the `project_ids` field are added to this list. repeated string resource_names = 8; @@ -320,8 +346,8 @@ message ListLogsRequest { // Result returned from ListLogs. message ListLogsResponse { // A list of log names. For example, - // `"projects/my-project/syslog"` or - // `"organizations/123/cloudresourcemanager.googleapis.com%2Factivity"`. + // `"projects/my-project/logs/syslog"` or + // `"organizations/123/logs/cloudresourcemanager.googleapis.com%2Factivity"`. repeated string log_names = 3; // If there might be more results than those appearing in this response, then diff --git a/logging/google/cloud/logging_v2/proto/logging_config.proto b/logging/google/cloud/logging_v2/proto/logging_config.proto index 2afea1062df5..a9ccdf51cb19 100644 --- a/logging/google/cloud/logging_v2/proto/logging_config.proto +++ b/logging/google/cloud/logging_v2/proto/logging_config.proto @@ -1,4 +1,4 @@ -// Copyright 2018 Google LLC. +// Copyright 2019 Google LLC. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -17,10 +17,12 @@ syntax = "proto3"; package google.logging.v2; -import "google/api/annotations.proto"; +import "google/protobuf/duration.proto"; import "google/protobuf/empty.proto"; import "google/protobuf/field_mask.proto"; import "google/protobuf/timestamp.proto"; +import "google/api/annotations.proto"; +import "google/api/client.proto"; option cc_enable_arenas = true; option csharp_namespace = "Google.Cloud.Logging.V2"; @@ -30,17 +32,31 @@ option java_outer_classname = "LoggingConfigProto"; option java_package = "com.google.logging.v2"; option php_namespace = "Google\\Cloud\\Logging\\V2"; -// Service for configuring sinks used to export log entries out of -// Logging. +// Service for configuring sinks used to route log entries. service ConfigServiceV2 { + option (google.api.default_host) = "logging.googleapis.com"; + option (google.api.oauth_scopes) = + "https://www.googleapis.com/auth/cloud-platform," + "https://www.googleapis.com/auth/cloud-platform.read-only," + "https://www.googleapis.com/auth/logging.admin," + "https://www.googleapis.com/auth/logging.read"; + // Lists sinks. 
rpc ListSinks(ListSinksRequest) returns (ListSinksResponse) { option (google.api.http) = { get: "/v2/{parent=*/*}/sinks" - additional_bindings { get: "/v2/{parent=projects/*}/sinks" } - additional_bindings { get: "/v2/{parent=organizations/*}/sinks" } - additional_bindings { get: "/v2/{parent=folders/*}/sinks" } - additional_bindings { get: "/v2/{parent=billingAccounts/*}/sinks" } + additional_bindings { + get: "/v2/{parent=projects/*}/sinks" + } + additional_bindings { + get: "/v2/{parent=organizations/*}/sinks" + } + additional_bindings { + get: "/v2/{parent=folders/*}/sinks" + } + additional_bindings { + get: "/v2/{parent=billingAccounts/*}/sinks" + } }; } @@ -48,27 +64,41 @@ service ConfigServiceV2 { rpc GetSink(GetSinkRequest) returns (LogSink) { option (google.api.http) = { get: "/v2/{sink_name=*/*/sinks/*}" - additional_bindings { get: "/v2/{sink_name=projects/*/sinks/*}" } - additional_bindings { get: "/v2/{sink_name=organizations/*/sinks/*}" } - additional_bindings { get: "/v2/{sink_name=folders/*/sinks/*}" } - additional_bindings { get: "/v2/{sink_name=billingAccounts/*/sinks/*}" } + additional_bindings { + get: "/v2/{sink_name=projects/*/sinks/*}" + } + additional_bindings { + get: "/v2/{sink_name=organizations/*/sinks/*}" + } + additional_bindings { + get: "/v2/{sink_name=folders/*/sinks/*}" + } + additional_bindings { + get: "/v2/{sink_name=billingAccounts/*/sinks/*}" + } }; } - // Creates a sink that exports specified log entries to a destination. The + // Creates a sink that exports specified log entries to a destination. The // export of newly-ingested log entries begins immediately, unless the sink's - // `writer_identity` is not permitted to write to the destination. A sink can + // `writer_identity` is not permitted to write to the destination. A sink can // export log entries only from the resource owning the sink. 
rpc CreateSink(CreateSinkRequest) returns (LogSink) { option (google.api.http) = { post: "/v2/{parent=*/*}/sinks" body: "sink" - additional_bindings { post: "/v2/{parent=projects/*}/sinks" body: "sink" } + additional_bindings { + post: "/v2/{parent=projects/*}/sinks" + body: "sink" + } additional_bindings { post: "/v2/{parent=organizations/*}/sinks" body: "sink" } - additional_bindings { post: "/v2/{parent=folders/*}/sinks" body: "sink" } + additional_bindings { + post: "/v2/{parent=folders/*}/sinks" + body: "sink" + } additional_bindings { post: "/v2/{parent=billingAccounts/*}/sinks" body: "sink" @@ -76,8 +106,9 @@ service ConfigServiceV2 { }; } - // Updates a sink. This method replaces the following fields in the existing + // Updates a sink. This method replaces the following fields in the existing // sink with values from the new sink: `destination`, and `filter`. + // // The updated sink might also have a new `writer_identity`; see the // `unique_writer_identity` field. rpc UpdateSink(UpdateSinkRequest) returns (LogSink) { @@ -124,9 +155,15 @@ service ConfigServiceV2 { rpc DeleteSink(DeleteSinkRequest) returns (google.protobuf.Empty) { option (google.api.http) = { delete: "/v2/{sink_name=*/*/sinks/*}" - additional_bindings { delete: "/v2/{sink_name=projects/*/sinks/*}" } - additional_bindings { delete: "/v2/{sink_name=organizations/*/sinks/*}" } - additional_bindings { delete: "/v2/{sink_name=folders/*/sinks/*}" } + additional_bindings { + delete: "/v2/{sink_name=projects/*/sinks/*}" + } + additional_bindings { + delete: "/v2/{sink_name=organizations/*/sinks/*}" + } + additional_bindings { + delete: "/v2/{sink_name=folders/*/sinks/*}" + } additional_bindings { delete: "/v2/{sink_name=billingAccounts/*/sinks/*}" } @@ -137,10 +174,18 @@ service ConfigServiceV2 { rpc ListExclusions(ListExclusionsRequest) returns (ListExclusionsResponse) { option (google.api.http) = { get: "/v2/{parent=*/*}/exclusions" - additional_bindings { get: 
"/v2/{parent=projects/*}/exclusions" } - additional_bindings { get: "/v2/{parent=organizations/*}/exclusions" } - additional_bindings { get: "/v2/{parent=folders/*}/exclusions" } - additional_bindings { get: "/v2/{parent=billingAccounts/*}/exclusions" } + additional_bindings { + get: "/v2/{parent=projects/*}/exclusions" + } + additional_bindings { + get: "/v2/{parent=organizations/*}/exclusions" + } + additional_bindings { + get: "/v2/{parent=folders/*}/exclusions" + } + additional_bindings { + get: "/v2/{parent=billingAccounts/*}/exclusions" + } }; } @@ -148,10 +193,18 @@ service ConfigServiceV2 { rpc GetExclusion(GetExclusionRequest) returns (LogExclusion) { option (google.api.http) = { get: "/v2/{name=*/*/exclusions/*}" - additional_bindings { get: "/v2/{name=projects/*/exclusions/*}" } - additional_bindings { get: "/v2/{name=organizations/*/exclusions/*}" } - additional_bindings { get: "/v2/{name=folders/*/exclusions/*}" } - additional_bindings { get: "/v2/{name=billingAccounts/*/exclusions/*}" } + additional_bindings { + get: "/v2/{name=projects/*/exclusions/*}" + } + additional_bindings { + get: "/v2/{name=organizations/*/exclusions/*}" + } + additional_bindings { + get: "/v2/{name=folders/*/exclusions/*}" + } + additional_bindings { + get: "/v2/{name=billingAccounts/*/exclusions/*}" + } }; } @@ -209,9 +262,15 @@ service ConfigServiceV2 { rpc DeleteExclusion(DeleteExclusionRequest) returns (google.protobuf.Empty) { option (google.api.http) = { delete: "/v2/{name=*/*/exclusions/*}" - additional_bindings { delete: "/v2/{name=projects/*/exclusions/*}" } - additional_bindings { delete: "/v2/{name=organizations/*/exclusions/*}" } - additional_bindings { delete: "/v2/{name=folders/*/exclusions/*}" } + additional_bindings { + delete: "/v2/{name=projects/*/exclusions/*}" + } + additional_bindings { + delete: "/v2/{name=organizations/*/exclusions/*}" + } + additional_bindings { + delete: "/v2/{name=folders/*/exclusions/*}" + } additional_bindings { delete: 
"/v2/{name=billingAccounts/*/exclusions/*}" } @@ -221,9 +280,9 @@ service ConfigServiceV2 { // Describes a sink used to export log entries to one of the following // destinations in any project: a Cloud Storage bucket, a BigQuery dataset, or a -// Cloud Pub/Sub topic. A logs filter controls which log entries are -// exported. The sink must be created within a project, organization, billing -// account, or folder. +// Cloud Pub/Sub topic. A logs filter controls which log entries are exported. +// The sink must be created within a project, organization, billing account, or +// folder. message LogSink { // Available log entry formats. Log entries can be written to // Logging in either format and can be exported in either format. @@ -240,7 +299,7 @@ message LogSink { } // Required. The client-assigned sink identifier, unique within the - // project. Example: `"my-syslog-errors-to-pubsub"`. Sink identifiers are + // project. Example: `"my-syslog-errors-to-pubsub"`. Sink identifiers are // limited to 100 characters and can include only the following characters: // upper and lower-case alphanumeric characters, underscores, hyphens, and // periods. @@ -254,34 +313,33 @@ message LogSink { // // The sink's `writer_identity`, set when the sink is created, must // have permission to write to the destination or else the log - // entries are not exported. For more information, see - // [Exporting Logs With Sinks](/logging/docs/api/tasks/exporting-logs). + // entries are not exported. For more information, see + // [Exporting Logs with Sinks](/logging/docs/api/tasks/exporting-logs). string destination = 3; - // Optional. - // An [advanced logs filter](/logging/docs/view/advanced_filters). The only + // Optional. An [advanced logs filter](/logging/docs/view/advanced-queries). The only // exported log entries are those that are in the resource owning the sink and - // that match the filter. For example: + // that match the filter. 
For example: // // logName="projects/[PROJECT_ID]/logs/[LOG_ID]" AND severity>=ERROR string filter = 5; // Deprecated. The log entry format to use for this sink's exported log - // entries. The v2 format is used by default and cannot be changed. + // entries. The v2 format is used by default and cannot be changed. VersionFormat output_version_format = 6 [deprecated = true]; // Output only. An IAM identity—a service account or group—under - // which Logging writes the exported log entries to the sink's - // destination. This field is set by - // [sinks.create](/logging/docs/api/reference/rest/v2/projects.sinks/create) + // which Logging writes the exported log entries to the sink's destination. + // This field is set by + // [sinks.create][google.logging.v2.ConfigServiceV2.CreateSink] // and - // [sinks.update](/logging/docs/api/reference/rest/v2/projects.sinks/update), - // based on the setting of `unique_writer_identity` in those methods. + // [sinks.update][google.logging.v2.ConfigServiceV2.UpdateSink] + // based on the value of `unique_writer_identity` in those methods. // // Until you grant this identity write-access to the destination, log entry // exports from this sink will fail. For more information, - // see [Granting access for a - // resource](/iam/docs/granting-roles-to-service-accounts#granting_access_to_a_service_account_for_a_resource). + // see [Granting Access for a + // Resource](/iam/docs/granting-roles-to-service-accounts#granting_access_to_a_service_account_for_a_resource). // Consult the destination service's documentation to determine the // appropriate IAM roles to assign to the identity. string writer_identity = 8; @@ -301,13 +359,41 @@ message LogSink { // resource.type=gce_instance bool include_children = 9; - // Deprecated. This field is ignored when creating or updating sinks. + // Optional. Destination dependent options. + oneof options { + // Optional. Options that affect sinks exporting data to BigQuery. 
+ BigQueryOptions bigquery_options = 12; + } + + // Output only. The creation timestamp of the sink. + // + // This field may not be present for older sinks. + google.protobuf.Timestamp create_time = 13; + + // Output only. The last update timestamp of the sink. + // + // This field may not be present for older sinks. + google.protobuf.Timestamp update_time = 14; + + // Do not use. This field is ignored. google.protobuf.Timestamp start_time = 10 [deprecated = true]; - // Deprecated. This field is ignored when creating or updating sinks. + // Do not use. This field is ignored. google.protobuf.Timestamp end_time = 11 [deprecated = true]; } +// Options that change functionality of a sink exporting data to BigQuery. +message BigQueryOptions { + // Optional. Whether to use [BigQuery's partition + // tables](/bigquery/docs/partitioned-tables). By default, Logging + // creates dated tables based on the log entries' timestamps, e.g. + // syslog_20170523. With partitioned tables the date suffix is no longer + // present and [special query + // syntax](/bigquery/docs/querying-partitioned-tables) has to be used instead. + // In both cases, tables are sharded based on UTC timezone. + bool use_partitioned_tables = 1; +} + // The parameters to `ListSinks`. message ListSinksRequest { // Required. The parent resource whose sinks are to be listed: @@ -319,13 +405,13 @@ message ListSinksRequest { string parent = 1; // Optional. If present, then retrieve the next batch of results from the - // preceding call to this method. `pageToken` must be the value of - // `nextPageToken` from the previous response. The values of other method + // preceding call to this method. `pageToken` must be the value of + // `nextPageToken` from the previous response. The values of other method // parameters should be identical to those in the previous call. string page_token = 2; // Optional. The maximum number of results to return from this request. - // Non-positive values are ignored. 
The presence of `nextPageToken` in the + // Non-positive values are ignored. The presence of `nextPageToken` in the // response indicates that more results might be available. int32 page_size = 3; } @@ -336,7 +422,7 @@ message ListSinksResponse { repeated LogSink sinks = 1; // If there might be more results than appear in this response, then - // `nextPageToken` is included. To get the next set of results, call the same + // `nextPageToken` is included. To get the next set of results, call the same // method again using the value of `nextPageToken` as `pageToken`. string next_page_token = 2; } @@ -371,17 +457,16 @@ message CreateSinkRequest { LogSink sink = 2; // Optional. Determines the kind of IAM identity returned as `writer_identity` - // in the new sink. If this value is omitted or set to false, and if the + // in the new sink. If this value is omitted or set to false, and if the // sink's parent is a project, then the value returned as `writer_identity` is - // the same group or service account used by Logging before the - // addition of writer identities to this API. The sink's destination must be - // in the same project as the sink itself. + // the same group or service account used by Logging before the addition of + // writer identities to this API. The sink's destination must be in the same + // project as the sink itself. // // If this field is set to true, or if the sink is owned by a non-project // resource such as an organization, then the value of `writer_identity` will - // be a unique service account used only for exports from the new sink. For - // more information, see `writer_identity` in - // [LogSink][google.logging.v2.LogSink]. + // be a unique service account used only for exports from the new sink. For + // more information, see `writer_identity` in [LogSink][google.logging.v2.LogSink]. bool unique_writer_identity = 3; } @@ -402,9 +487,8 @@ message UpdateSinkRequest { // as part of `sink_name`. LogSink sink = 2; - // Optional. 
See - // [sinks.create](/logging/docs/api/reference/rest/v2/projects.sinks/create) - // for a description of this field. When updating a sink, the effect of this + // Optional. See [sinks.create][google.logging.v2.ConfigServiceV2.CreateSink] + // for a description of this field. When updating a sink, the effect of this // field on the value of `writer_identity` in the updated sink depends on both // the old and new values of this field: // @@ -418,7 +502,7 @@ message UpdateSinkRequest { // Optional. Field mask that specifies the fields in `sink` that need // an update. A sink field will be overwritten if, and only if, it is - // in the update mask. `name` and output only fields cannot be updated. + // in the update mask. `name` and output only fields cannot be updated. // // An empty updateMask is temporarily treated as using the following mask // for backwards compatibility purposes: @@ -448,11 +532,11 @@ message DeleteSinkRequest { } // Specifies a set of log entries that are not to be stored in -// Logging. If your project receives a large volume of logs, you might be able -// to use exclusions to reduce your chargeable logs. Exclusions are processed -// after log sinks, so you can export log entries before they are excluded. -// Audit log entries and log entries from Amazon Web Services are never -// excluded. +// Logging. If your GCP resource receives a large volume of logs, you can +// use exclusions to reduce your chargeable logs. Exclusions are +// processed after log sinks, so you can export log entries before they are +// excluded. Note that organization-level and folder-level exclusions don't +// apply to child resources, and that you can't exclude audit log entries. message LogExclusion { // Required. A client-assigned identifier, such as // `"load-balancer-exclusion"`. Identifiers are limited to 100 characters and @@ -462,22 +546,31 @@ message LogExclusion { // Optional. A description of this exclusion. string description = 2; - // Required. 
- // An [advanced logs filter](/logging/docs/view/advanced_filters) + // Required. An [advanced logs filter](/logging/docs/view/advanced-queries) // that matches the log entries to be excluded. By using the - // [sample function](/logging/docs/view/advanced_filters#sample), + // [sample function](/logging/docs/view/advanced-queries#sample), // you can exclude less than 100% of the matching log entries. - // For example, the following filter matches 99% of low-severity log - // entries from load balancers: + // For example, the following query matches 99% of low-severity log + // entries from Google Cloud Storage buckets: // - // `"resource.type=http_load_balancer severity\n\x10\x62igquery_options\x18\x0c \x01(\x0b\x32".google.logging.v2.BigQueryOptionsH\x00\x12/\n\x0b\x63reate_time\x18\r \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x0e \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x32\n\nstart_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x02\x18\x01\x12\x30\n\x08\x65nd_time\x18\x0b \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x02\x18\x01"?\n\rVersionFormat\x12\x1e\n\x1aVERSION_FORMAT_UNSPECIFIED\x10\x00\x12\x06\n\x02V2\x10\x01\x12\x06\n\x02V1\x10\x02\x42\t\n\x07options"1\n\x0f\x42igQueryOptions\x12\x1e\n\x16use_partitioned_tables\x18\x01 \x01(\x08"I\n\x10ListSinksRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"W\n\x11ListSinksResponse\x12)\n\x05sinks\x18\x01 \x03(\x0b\x32\x1a.google.logging.v2.LogSink\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"#\n\x0eGetSinkRequest\x12\x11\n\tsink_name\x18\x01 \x01(\t"m\n\x11\x43reateSinkRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12(\n\x04sink\x18\x02 \x01(\x0b\x32\x1a.google.logging.v2.LogSink\x12\x1e\n\x16unique_writer_identity\x18\x03 \x01(\x08"\xa1\x01\n\x11UpdateSinkRequest\x12\x11\n\tsink_name\x18\x01 \x01(\t\x12(\n\x04sink\x18\x02 
\x01(\x0b\x32\x1a.google.logging.v2.LogSink\x12\x1e\n\x16unique_writer_identity\x18\x03 \x01(\x08\x12/\n\x0bupdate_mask\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"&\n\x11\x44\x65leteSinkRequest\x12\x11\n\tsink_name\x18\x01 \x01(\t"\xb5\x01\n\x0cLogExclusion\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x03 \x01(\t\x12\x10\n\x08\x64isabled\x18\x04 \x01(\x08\x12/\n\x0b\x63reate_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"N\n\x15ListExclusionsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"f\n\x16ListExclusionsResponse\x12\x33\n\nexclusions\x18\x01 \x03(\x0b\x32\x1f.google.logging.v2.LogExclusion\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"#\n\x13GetExclusionRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"\\\n\x16\x43reateExclusionRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x32\n\texclusion\x18\x02 \x01(\x0b\x32\x1f.google.logging.v2.LogExclusion"\x8b\x01\n\x16UpdateExclusionRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x32\n\texclusion\x18\x02 \x01(\x0b\x32\x1f.google.logging.v2.LogExclusion\x12/\n\x0bupdate_mask\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"&\n\x16\x44\x65leteExclusionRequest\x12\x0c\n\x04name\x18\x01 
\x01(\t2\xf6\x1a\n\x0f\x43onfigServiceV2\x12\x87\x02\n\tListSinks\x12#.google.logging.v2.ListSinksRequest\x1a$.google.logging.v2.ListSinksResponse"\xae\x01\x82\xd3\xe4\x93\x02\xa7\x01\x12\x16/v2/{parent=*/*}/sinksZ\x1f\x12\x1d/v2/{parent=projects/*}/sinksZ$\x12"/v2/{parent=organizations/*}/sinksZ\x1e\x12\x1c/v2/{parent=folders/*}/sinksZ&\x12$/v2/{parent=billingAccounts/*}/sinks\x12\x92\x02\n\x07GetSink\x12!.google.logging.v2.GetSinkRequest\x1a\x1a.google.logging.v2.LogSink"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01\x12\x1b/v2/{sink_name=*/*/sinks/*}Z$\x12"/v2/{sink_name=projects/*/sinks/*}Z)\x12\'/v2/{sink_name=organizations/*/sinks/*}Z#\x12!/v2/{sink_name=folders/*/sinks/*}Z+\x12)/v2/{sink_name=billingAccounts/*/sinks/*}\x12\x9d\x02\n\nCreateSink\x12$.google.logging.v2.CreateSinkRequest\x1a\x1a.google.logging.v2.LogSink"\xcc\x01\x82\xd3\xe4\x93\x02\xc5\x01"\x16/v2/{parent=*/*}/sinks:\x04sinkZ%"\x1d/v2/{parent=projects/*}/sinks:\x04sinkZ*""/v2/{parent=organizations/*}/sinks:\x04sinkZ$"\x1c/v2/{parent=folders/*}/sinks:\x04sinkZ,"$/v2/{parent=billingAccounts/*}/sinks:\x04sink\x12\xf1\x03\n\nUpdateSink\x12$.google.logging.v2.UpdateSinkRequest\x1a\x1a.google.logging.v2.LogSink"\xa0\x03\x82\xd3\xe4\x93\x02\x99\x03\x1a\x1b/v2/{sink_name=*/*/sinks/*}:\x04sinkZ*\x1a"/v2/{sink_name=projects/*/sinks/*}:\x04sinkZ/\x1a\'/v2/{sink_name=organizations/*/sinks/*}:\x04sinkZ)\x1a!/v2/{sink_name=folders/*/sinks/*}:\x04sinkZ1\x1a)/v2/{sink_name=billingAccounts/*/sinks/*}:\x04sinkZ*2"/v2/{sink_name=projects/*/sinks/*}:\x04sinkZ/2\'/v2/{sink_name=organizations/*/sinks/*}:\x04sinkZ)2!/v2/{sink_name=folders/*/sinks/*}:\x04sinkZ12)/v2/{sink_name=billingAccounts/*/sinks/*}:\x04sink\x12\x94\x02\n\nDeleteSink\x12$.google.logging.v2.DeleteSinkRequest\x1a\x16.google.protobuf.Empty"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01*\x1b/v2/{sink_name=*/*/sinks/*}Z$*"/v2/{sink_name=projects/*/sinks/*}Z)*\'/v2/{sink_name=organizations/*/sinks/*}Z#*!/v2/{sink_name=folders/*/sinks/*}Z+*)/v2/{sink_name=billingAccounts/*/
sinks/*}\x12\xaf\x02\n\x0eListExclusions\x12(.google.logging.v2.ListExclusionsRequest\x1a).google.logging.v2.ListExclusionsResponse"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01\x12\x1b/v2/{parent=*/*}/exclusionsZ$\x12"/v2/{parent=projects/*}/exclusionsZ)\x12\'/v2/{parent=organizations/*}/exclusionsZ#\x12!/v2/{parent=folders/*}/exclusionsZ+\x12)/v2/{parent=billingAccounts/*}/exclusions\x12\xa1\x02\n\x0cGetExclusion\x12&.google.logging.v2.GetExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01\x12\x1b/v2/{name=*/*/exclusions/*}Z$\x12"/v2/{name=projects/*/exclusions/*}Z)\x12\'/v2/{name=organizations/*/exclusions/*}Z#\x12!/v2/{name=folders/*/exclusions/*}Z+\x12)/v2/{name=billingAccounts/*/exclusions/*}\x12\xde\x02\n\x0f\x43reateExclusion\x12).google.logging.v2.CreateExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion"\xfe\x01\x82\xd3\xe4\x93\x02\xf7\x01"\x1b/v2/{parent=*/*}/exclusions:\texclusionZ/""/v2/{parent=projects/*}/exclusions:\texclusionZ4"\'/v2/{parent=organizations/*}/exclusions:\texclusionZ."!/v2/{parent=folders/*}/exclusions:\texclusionZ6")/v2/{parent=billingAccounts/*}/exclusions:\texclusion\x12\xde\x02\n\x0fUpdateExclusion\x12).google.logging.v2.UpdateExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion"\xfe\x01\x82\xd3\xe4\x93\x02\xf7\x01\x32\x1b/v2/{name=*/*/exclusions/*}:\texclusionZ/2"/v2/{name=projects/*/exclusions/*}:\texclusionZ42\'/v2/{name=organizations/*/exclusions/*}:\texclusionZ.2!/v2/{name=folders/*/exclusions/*}:\texclusionZ62)/v2/{name=billingAccounts/*/exclusions/*}:\texclusion\x12\x9e\x02\n\x0f\x44\x65leteExclusion\x12).google.logging.v2.DeleteExclusionRequest\x1a\x16.google.protobuf.Empty"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01*\x1b/v2/{name=*/*/exclusions/*}Z$*"/v2/{name=projects/*/exclusions/*}Z)*\'/v2/{name=organizations/*/exclusions/*}Z#*!/v2/{name=folders/*/exclusions/*}Z+*)/v2/{name=billingAccounts/*/exclusions/*}\x1a\xdf\x01\xca\x41\x16logging.googleapis.com\xd2\x41\xc2\x01https://www.googleapis.
com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-only,https://www.googleapis.com/auth/logging.admin,https://www.googleapis.com/auth/logging.readB\x9e\x01\n\x15\x63om.google.logging.v2B\x12LoggingConfigProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' ), dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, + google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, ], ) @@ -63,8 +66,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=492, - serialized_end=555, + serialized_start=711, + serialized_end=774, ) _sym_db.RegisterEnumDescriptor(_LOGSINK_VERSIONFORMAT) @@ -184,10 +187,64 @@ serialized_options=None, file=DESCRIPTOR, ), + _descriptor.FieldDescriptor( + name="bigquery_options", + full_name="google.logging.v2.LogSink.bigquery_options", + index=6, + number=12, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="create_time", + full_name="google.logging.v2.LogSink.create_time", + index=7, + number=13, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="update_time", + full_name="google.logging.v2.LogSink.update_time", + index=8, + number=14, + type=11, + cpp_type=10, + label=1, + 
has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), _descriptor.FieldDescriptor( name="start_time", full_name="google.logging.v2.LogSink.start_time", - index=6, + index=9, number=10, type=11, cpp_type=10, @@ -205,7 +262,7 @@ _descriptor.FieldDescriptor( name="end_time", full_name="google.logging.v2.LogSink.end_time", - index=7, + index=10, number=11, type=11, cpp_type=10, @@ -223,14 +280,61 @@ ], extensions=[], nested_types=[], - enum_types=[_LOGSINK_VERSIONFORMAT], + enum_types=[_LOGSINK_VERSIONFORMAT,], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="options", + full_name="google.logging.v2.LogSink.options", + index=0, + containing_type=None, + fields=[], + ), + ], + serialized_start=257, + serialized_end=785, +) + + +_BIGQUERYOPTIONS = _descriptor.Descriptor( + name="BigQueryOptions", + full_name="google.logging.v2.BigQueryOptions", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="use_partitioned_tables", + full_name="google.logging.v2.BigQueryOptions.use_partitioned_tables", + index=0, + number=1, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=200, - serialized_end=555, + serialized_start=787, + serialized_end=836, ) @@ -304,8 +408,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=557, - serialized_end=630, + serialized_start=838, + serialized_end=911, ) @@ -361,8 +465,8 
@@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=632, - serialized_end=719, + serialized_start=913, + serialized_end=1000, ) @@ -390,7 +494,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -400,8 +504,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=721, - serialized_end=756, + serialized_start=1002, + serialized_end=1037, ) @@ -475,8 +579,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=758, - serialized_end=867, + serialized_start=1039, + serialized_end=1148, ) @@ -568,8 +672,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=870, - serialized_end=1031, + serialized_start=1151, + serialized_end=1312, ) @@ -597,7 +701,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -607,8 +711,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1033, - serialized_end=1071, + serialized_start=1314, + serialized_end=1352, ) @@ -691,6 +795,42 @@ serialized_options=None, file=DESCRIPTOR, ), + _descriptor.FieldDescriptor( + name="create_time", + full_name="google.logging.v2.LogExclusion.create_time", + index=4, + number=5, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="update_time", + full_name="google.logging.v2.LogExclusion.update_time", + index=5, + number=6, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), ], extensions=[], nested_types=[], @@ -700,8 +840,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - 
serialized_start=1073, - serialized_end=1156, + serialized_start=1355, + serialized_end=1536, ) @@ -775,8 +915,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1158, - serialized_end=1236, + serialized_start=1538, + serialized_end=1616, ) @@ -832,8 +972,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1238, - serialized_end=1340, + serialized_start=1618, + serialized_end=1720, ) @@ -861,7 +1001,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -871,8 +1011,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1342, - serialized_end=1377, + serialized_start=1722, + serialized_end=1757, ) @@ -928,8 +1068,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1379, - serialized_end=1471, + serialized_start=1759, + serialized_end=1851, ) @@ -1003,8 +1143,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1474, - serialized_end=1613, + serialized_start=1854, + serialized_end=1993, ) @@ -1032,7 +1172,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -1042,11 +1182,18 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1615, - serialized_end=1653, + serialized_start=1995, + serialized_end=2033, ) _LOGSINK.fields_by_name["output_version_format"].enum_type = _LOGSINK_VERSIONFORMAT +_LOGSINK.fields_by_name["bigquery_options"].message_type = _BIGQUERYOPTIONS +_LOGSINK.fields_by_name[ + "create_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_LOGSINK.fields_by_name[ + "update_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP _LOGSINK.fields_by_name[ "start_time" ].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP @@ -1054,12 +1201,24 @@ "end_time" ].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP _LOGSINK_VERSIONFORMAT.containing_type = 
_LOGSINK +_LOGSINK.oneofs_by_name["options"].fields.append( + _LOGSINK.fields_by_name["bigquery_options"] +) +_LOGSINK.fields_by_name["bigquery_options"].containing_oneof = _LOGSINK.oneofs_by_name[ + "options" +] _LISTSINKSRESPONSE.fields_by_name["sinks"].message_type = _LOGSINK _CREATESINKREQUEST.fields_by_name["sink"].message_type = _LOGSINK _UPDATESINKREQUEST.fields_by_name["sink"].message_type = _LOGSINK _UPDATESINKREQUEST.fields_by_name[ "update_mask" ].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK +_LOGEXCLUSION.fields_by_name[ + "create_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_LOGEXCLUSION.fields_by_name[ + "update_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP _LISTEXCLUSIONSRESPONSE.fields_by_name["exclusions"].message_type = _LOGEXCLUSION _CREATEEXCLUSIONREQUEST.fields_by_name["exclusion"].message_type = _LOGEXCLUSION _UPDATEEXCLUSIONREQUEST.fields_by_name["exclusion"].message_type = _LOGEXCLUSION @@ -1067,6 +1226,7 @@ "update_mask" ].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK DESCRIPTOR.message_types_by_name["LogSink"] = _LOGSINK +DESCRIPTOR.message_types_by_name["BigQueryOptions"] = _BIGQUERYOPTIONS DESCRIPTOR.message_types_by_name["ListSinksRequest"] = _LISTSINKSREQUEST DESCRIPTOR.message_types_by_name["ListSinksResponse"] = _LISTSINKSRESPONSE DESCRIPTOR.message_types_by_name["GetSinkRequest"] = _GETSINKREQUEST @@ -1110,10 +1270,10 @@ sink's ``writer_identity``, set when the sink is created, must have permission to write to the destination or else the log entries are not exported. For more information, see `Exporting - Logs With Sinks `__. + Logs with Sinks `__. filter: Optional. An `advanced logs filter - `__. The only exported + `__. The only exported log entries are those that are in the resource owning the sink and that match the filter. 
For example: :: logName="projects/[PROJECT_ID]/logs/[LOG_ID]" AND @@ -1125,17 +1285,18 @@ writer_identity: Output only. An IAM identity—a service account or group—under which Logging writes the exported log entries to the sink's - destination. This field is set by `sinks.create - `__ - and `sinks.update `__, based on the setting of - ``unique_writer_identity`` in those methods. Until you grant - this identity write-access to the destination, log entry - exports from this sink will fail. For more information, see - `Granting access for a resource `__. Consult the destination service's documentation to - determine the appropriate IAM roles to assign to the identity. + destination. This field is set by + [sinks.create][google.logging.v2.ConfigServiceV2.CreateSink] + and + [sinks.update][google.logging.v2.ConfigServiceV2.UpdateSink] + based on the value of ``unique_writer_identity`` in those + methods. Until you grant this identity write-access to the + destination, log entry exports from this sink will fail. For + more information, see `Granting Access for a Resource + `__. Consult the + destination service's documentation to determine the + appropriate IAM roles to assign to the identity. include_children: Optional. This field applies only to sinks owned by organizations and folders. If the field is false, the default, @@ -1151,18 +1312,52 @@ certain child projects, filter on the project part of the log name: :: logName:("projects/test-project1/" OR "projects/test-project2/") AND resource.type=gce_instance + options: + Optional. Destination dependent options. + bigquery_options: + Optional. Options that affect sinks exporting data to + BigQuery. + create_time: + Output only. The creation timestamp of the sink. This field + may not be present for older sinks. + update_time: + Output only. The last update timestamp of the sink. This + field may not be present for older sinks. start_time: - Deprecated. This field is ignored when creating or updating - sinks. 
+ Do not use. This field is ignored. end_time: - Deprecated. This field is ignored when creating or updating - sinks. + Do not use. This field is ignored. """, # @@protoc_insertion_point(class_scope:google.logging.v2.LogSink) ), ) _sym_db.RegisterMessage(LogSink) +BigQueryOptions = _reflection.GeneratedProtocolMessageType( + "BigQueryOptions", + (_message.Message,), + dict( + DESCRIPTOR=_BIGQUERYOPTIONS, + __module__="google.cloud.logging_v2.proto.logging_config_pb2", + __doc__="""Options that change functionality of a sink exporting data to BigQuery. + + + Attributes: + use_partitioned_tables: + Optional. Whether to use `BigQuery's partition tables + `__. By default, Logging + creates dated tables based on the log entries' timestamps, + e.g. syslog\_20170523. With partitioned tables the date suffix + is no longer present and `special query syntax + `__ has to be used + instead. In both cases, tables are sharded based on UTC + timezone. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.BigQueryOptions) + ), +) +_sym_db.RegisterMessage(BigQueryOptions) + ListSinksRequest = _reflection.GeneratedProtocolMessageType( "ListSinksRequest", (_message.Message,), @@ -1302,8 +1497,8 @@ Required. The updated sink, whose name is the same identifier that appears as part of ``sink_name``. unique_writer_identity: - Optional. See `sinks.create - `__ + Optional. See + [sinks.create][google.logging.v2.ConfigServiceV2.CreateSink] for a description of this field. When updating a sink, the effect of this field on the value of ``writer_identity`` in the updated sink depends on both the old and new values of @@ -1364,11 +1559,11 @@ DESCRIPTOR=_LOGEXCLUSION, __module__="google.cloud.logging_v2.proto.logging_config_pb2", __doc__="""Specifies a set of log entries that are not to be stored in Logging. 
If - your project receives a large volume of logs, you might be able to use + your GCP resource receives a large volume of logs, you can use exclusions to reduce your chargeable logs. Exclusions are processed after log sinks, so you can export log entries before they are excluded. - Audit log entries and log entries from Amazon Web Services are never - excluded. + Note that organization-level and folder-level exclusions don't apply to + child resources, and that you can't exclude audit log entries. Attributes: @@ -1381,19 +1576,25 @@ Optional. A description of this exclusion. filter: Required. An `advanced logs filter - `__ that matches the log + `__ that matches the log entries to be excluded. By using the `sample function - `__, you can + `__, you can exclude less than 100% of the matching log entries. For - example, the following filter matches 99% of low-severity log - entries from load balancers: - ``"resource.type=http_load_balancer severity`__ to change the value of this field. + it does not exclude any log entries. You can [update an + exclusion][google.logging.v2.ConfigServiceV2.UpdateExclusion] + to change the value of this field. + create_time: + Output only. The creation timestamp of the exclusion. This + field may not be present for older exclusions. + update_time: + Output only. The last update timestamp of the exclusion. This + field may not be present for older exclusions. """, # @@protoc_insertion_point(class_scope:google.logging.v2.LogExclusion) ), @@ -1529,7 +1730,7 @@ Required. New values for the existing exclusion. Only the fields specified in ``update_mask`` are relevant. update_mask: - Required. A nonempty list of fields to change in the existing + Required. A non-empty list of fields to change in the existing exclusion. 
New values for the fields are taken from the corresponding fields in the [LogExclusion][google.logging.v2.LogExclusion] included in @@ -1579,9 +1780,11 @@ full_name="google.logging.v2.ConfigServiceV2", file=DESCRIPTOR, index=0, - serialized_options=None, - serialized_start=1656, - serialized_end=4876, + serialized_options=_b( + "\312A\026logging.googleapis.com\322A\302\001https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-only,https://www.googleapis.com/auth/logging.admin,https://www.googleapis.com/auth/logging.read" + ), + serialized_start=2036, + serialized_end=5482, methods=[ _descriptor.MethodDescriptor( name="ListSinks", diff --git a/logging/google/cloud/logging_v2/proto/logging_config_pb2_grpc.py b/logging/google/cloud/logging_v2/proto/logging_config_pb2_grpc.py index 6e93d39b46b4..b250dc7dec22 100644 --- a/logging/google/cloud/logging_v2/proto/logging_config_pb2_grpc.py +++ b/logging/google/cloud/logging_v2/proto/logging_config_pb2_grpc.py @@ -8,8 +8,7 @@ class ConfigServiceV2Stub(object): - """Service for configuring sinks used to export log entries out of - Logging. + """Service for configuring sinks used to route log entries. """ def __init__(self, channel): @@ -71,8 +70,7 @@ def __init__(self, channel): class ConfigServiceV2Servicer(object): - """Service for configuring sinks used to export log entries out of - Logging. + """Service for configuring sinks used to route log entries. """ def ListSinks(self, request, context): @@ -90,9 +88,9 @@ def GetSink(self, request, context): raise NotImplementedError("Method not implemented!") def CreateSink(self, request, context): - """Creates a sink that exports specified log entries to a destination. The + """Creates a sink that exports specified log entries to a destination. The export of newly-ingested log entries begins immediately, unless the sink's - `writer_identity` is not permitted to write to the destination. 
A sink can + `writer_identity` is not permitted to write to the destination. A sink can export log entries only from the resource owning the sink. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) @@ -100,8 +98,9 @@ def CreateSink(self, request, context): raise NotImplementedError("Method not implemented!") def UpdateSink(self, request, context): - """Updates a sink. This method replaces the following fields in the existing + """Updates a sink. This method replaces the following fields in the existing sink with values from the new sink: `destination`, and `filter`. + The updated sink might also have a new `writer_identity`; see the `unique_writer_identity` field. """ diff --git a/logging/google/cloud/logging_v2/proto/logging_metrics.proto b/logging/google/cloud/logging_v2/proto/logging_metrics.proto index dd3fa87821dc..0c294b1013ee 100644 --- a/logging/google/cloud/logging_v2/proto/logging_metrics.proto +++ b/logging/google/cloud/logging_v2/proto/logging_metrics.proto @@ -1,4 +1,4 @@ -// Copyright 2018 Google LLC. +// Copyright 2019 Google LLC. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -17,12 +17,13 @@ syntax = "proto3"; package google.logging.v2; -import "google/api/annotations.proto"; import "google/api/distribution.proto"; import "google/api/metric.proto"; import "google/protobuf/empty.proto"; import "google/protobuf/field_mask.proto"; import "google/protobuf/timestamp.proto"; +import "google/api/annotations.proto"; +import "google/api/client.proto"; option cc_enable_arenas = true; option csharp_namespace = "Google.Cloud.Logging.V2"; @@ -34,6 +35,14 @@ option php_namespace = "Google\\Cloud\\Logging\\V2"; // Service for configuring logs-based metrics. 
service MetricsServiceV2 { + option (google.api.default_host) = "logging.googleapis.com"; + option (google.api.oauth_scopes) = + "https://www.googleapis.com/auth/cloud-platform," + "https://www.googleapis.com/auth/cloud-platform.read-only," + "https://www.googleapis.com/auth/logging.admin," + "https://www.googleapis.com/auth/logging.read," + "https://www.googleapis.com/auth/logging.write"; + // Lists logs-based metrics. rpc ListLogMetrics(ListLogMetricsRequest) returns (ListLogMetricsResponse) { option (google.api.http) = { @@ -72,8 +81,8 @@ service MetricsServiceV2 { } } -// Describes a logs-based metric. The value of the metric is the -// number of log entries that match a logs filter in a given time interval. +// Describes a logs-based metric. The value of the metric is the number of log +// entries that match a logs filter in a given time interval. // // Logs-based metric can also be used to extract values from logs and create a // a distribution of the values. The distribution records the statistics of the @@ -92,21 +101,20 @@ message LogMetric { // Required. The client-assigned metric identifier. // Examples: `"error_count"`, `"nginx/requests"`. // - // Metric identifiers are limited to 100 characters and can include - // only the following characters: `A-Z`, `a-z`, `0-9`, and the - // special characters `_-.,+!*',()%/`. The forward-slash character - // (`/`) denotes a hierarchy of name pieces, and it cannot be the - // first character of the name. + // Metric identifiers are limited to 100 characters and can include only the + // following characters: `A-Z`, `a-z`, `0-9`, and the special characters + // `_-.,+!*',()%/`. The forward-slash character (`/`) denotes a hierarchy of + // name pieces, and it cannot be the first character of the name. // // The metric identifier in this field must not be // [URL-encoded](https://en.wikipedia.org/wiki/Percent-encoding). 
- // However, when the metric identifier appears as the `[METRIC_ID]` - // part of a `metric_name` API parameter, then the metric identifier - // must be URL-encoded. Example: - // `"projects/my-project/metrics/nginx%2Frequests"`. + // However, when the metric identifier appears as the `[METRIC_ID]` part of a + // `metric_name` API parameter, then the metric identifier must be + // URL-encoded. Example: `"projects/my-project/metrics/nginx%2Frequests"`. string name = 1; // Optional. A description of this metric, which is used in documentation. + // The maximum length of the description is 8000 characters. string description = 2; // Required. An [advanced logs filter](/logging/docs/view/advanced_filters) @@ -183,6 +191,16 @@ message LogMetric { // used to create a histogram of the extracted values. google.api.Distribution.BucketOptions bucket_options = 8; + // Output only. The creation timestamp of the metric. + // + // This field may not be present for older metrics. + google.protobuf.Timestamp create_time = 9; + + // Output only. The last update timestamp of the metric. + // + // This field may not be present for older metrics. + google.protobuf.Timestamp update_time = 10; + // Deprecated. The API version that created or updated this metric. // The v2 format is used by default and cannot be changed. ApiVersion version = 4 [deprecated = true]; @@ -196,13 +214,13 @@ message ListLogMetricsRequest { string parent = 1; // Optional. If present, then retrieve the next batch of results from the - // preceding call to this method. `pageToken` must be the value of - // `nextPageToken` from the previous response. The values of other method + // preceding call to this method. `pageToken` must be the value of + // `nextPageToken` from the previous response. The values of other method // parameters should be identical to those in the previous call. string page_token = 2; // Optional. The maximum number of results to return from this request. - // Non-positive values are ignored. 
The presence of `nextPageToken` in the + // Non-positive values are ignored. The presence of `nextPageToken` in the // response indicates that more results might be available. int32 page_size = 3; } @@ -213,7 +231,7 @@ message ListLogMetricsResponse { repeated LogMetric metrics = 1; // If there might be more results than appear in this response, then - // `nextPageToken` is included. To get the next set of results, call this + // `nextPageToken` is included. To get the next set of results, call this // method again using the value of `nextPageToken` as `pageToken`. string next_page_token = 2; } diff --git a/logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py b/logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py index fc50185871d2..1addc0a0b592 100644 --- a/logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py +++ b/logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- -# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! 
# source: google/cloud/logging_v2/proto/logging_metrics.proto @@ -16,12 +15,13 @@ _sym_db = _symbol_database.Default() -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 from google.api import distribution_pb2 as google_dot_api_dot_distribution__pb2 from google.api import metric_pb2 as google_dot_api_dot_metric__pb2 from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 DESCRIPTOR = _descriptor.FileDescriptor( @@ -32,15 +32,16 @@ "\n\025com.google.logging.v2B\023LoggingMetricsProtoP\001Z8google.golang.org/genproto/googleapis/logging/v2;logging\370\001\001\252\002\027Google.Cloud.Logging.V2\312\002\027Google\\Cloud\\Logging\\V2" ), serialized_pb=_b( - '\n3google/cloud/logging_v2/proto/logging_metrics.proto\x12\x11google.logging.v2\x1a\x1cgoogle/api/annotations.proto\x1a\x1dgoogle/api/distribution.proto\x1a\x17google/api/metric.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xb1\x03\n\tLogMetric\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x03 \x01(\t\x12\x37\n\x11metric_descriptor\x18\x05 \x01(\x0b\x32\x1c.google.api.MetricDescriptor\x12\x17\n\x0fvalue_extractor\x18\x06 \x01(\t\x12K\n\x10label_extractors\x18\x07 \x03(\x0b\x32\x31.google.logging.v2.LogMetric.LabelExtractorsEntry\x12>\n\x0e\x62ucket_options\x18\x08 \x01(\x0b\x32&.google.api.Distribution.BucketOptions\x12<\n\x07version\x18\x04 \x01(\x0e\x32\'.google.logging.v2.LogMetric.ApiVersionB\x02\x18\x01\x1a\x36\n\x14LabelExtractorsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01"\x1c\n\nApiVersion\x12\x06\n\x02V2\x10\x00\x12\x06\n\x02V1\x10\x01"N\n\x15ListLogMetricsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"`\n\x16ListLogMetricsResponse\x12-\n\x07metrics\x18\x01 \x03(\x0b\x32\x1c.google.logging.v2.LogMetric\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"*\n\x13GetLogMetricRequest\x12\x13\n\x0bmetric_name\x18\x01 \x01(\t"V\n\x16\x43reateLogMetricRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12,\n\x06metric\x18\x02 \x01(\x0b\x32\x1c.google.logging.v2.LogMetric"[\n\x16UpdateLogMetricRequest\x12\x13\n\x0bmetric_name\x18\x01 \x01(\t\x12,\n\x06metric\x18\x02 \x01(\x0b\x32\x1c.google.logging.v2.LogMetric"-\n\x16\x44\x65leteLogMetricRequest\x12\x13\n\x0bmetric_name\x18\x01 \x01(\t2\xd4\x05\n\x10MetricsServiceV2\x12\x8e\x01\n\x0eListLogMetrics\x12(.google.logging.v2.ListLogMetricsRequest\x1a).google.logging.v2.ListLogMetricsResponse"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v2/{parent=projects/*}/metrics\x12\x84\x01\n\x0cGetLogMetric\x12&.google.logging.v2.GetLogMetricRequest\x1a\x1c.google.logging.v2.LogMetric".\x82\xd3\xe4\x93\x02(\x12&/v2/{metric_name=projects/*/metrics/*}\x12\x8b\x01\n\x0f\x43reateLogMetric\x12).google.logging.v2.CreateLogMetricRequest\x1a\x1c.google.logging.v2.LogMetric"/\x82\xd3\xe4\x93\x02)"\x1f/v2/{parent=projects/*}/metrics:\x06metric\x12\x92\x01\n\x0fUpdateLogMetric\x12).google.logging.v2.UpdateLogMetricRequest\x1a\x1c.google.logging.v2.LogMetric"6\x82\xd3\xe4\x93\x02\x30\x1a&/v2/{metric_name=projects/*/metrics/*}:\x06metric\x12\x84\x01\n\x0f\x44\x65leteLogMetric\x12).google.logging.v2.DeleteLogMetricRequest\x1a\x16.google.protobuf.Empty".\x82\xd3\xe4\x93\x02(*&/v2/{metric_name=projects/*/metrics/*}B\x9f\x01\n\x15\x63om.google.logging.v2B\x13LoggingMetricsProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' + 
'\n3google/cloud/logging_v2/proto/logging_metrics.proto\x12\x11google.logging.v2\x1a\x1dgoogle/api/distribution.proto\x1a\x17google/api/metric.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto"\x93\x04\n\tLogMetric\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x03 \x01(\t\x12\x37\n\x11metric_descriptor\x18\x05 \x01(\x0b\x32\x1c.google.api.MetricDescriptor\x12\x17\n\x0fvalue_extractor\x18\x06 \x01(\t\x12K\n\x10label_extractors\x18\x07 \x03(\x0b\x32\x31.google.logging.v2.LogMetric.LabelExtractorsEntry\x12>\n\x0e\x62ucket_options\x18\x08 \x01(\x0b\x32&.google.api.Distribution.BucketOptions\x12/\n\x0b\x63reate_time\x18\t \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12<\n\x07version\x18\x04 \x01(\x0e\x32\'.google.logging.v2.LogMetric.ApiVersionB\x02\x18\x01\x1a\x36\n\x14LabelExtractorsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x1c\n\nApiVersion\x12\x06\n\x02V2\x10\x00\x12\x06\n\x02V1\x10\x01"N\n\x15ListLogMetricsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"`\n\x16ListLogMetricsResponse\x12-\n\x07metrics\x18\x01 \x03(\x0b\x32\x1c.google.logging.v2.LogMetric\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"*\n\x13GetLogMetricRequest\x12\x13\n\x0bmetric_name\x18\x01 \x01(\t"V\n\x16\x43reateLogMetricRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12,\n\x06metric\x18\x02 \x01(\x0b\x32\x1c.google.logging.v2.LogMetric"[\n\x16UpdateLogMetricRequest\x12\x13\n\x0bmetric_name\x18\x01 \x01(\t\x12,\n\x06metric\x18\x02 \x01(\x0b\x32\x1c.google.logging.v2.LogMetric"-\n\x16\x44\x65leteLogMetricRequest\x12\x13\n\x0bmetric_name\x18\x01 
\x01(\t2\xe4\x07\n\x10MetricsServiceV2\x12\x8e\x01\n\x0eListLogMetrics\x12(.google.logging.v2.ListLogMetricsRequest\x1a).google.logging.v2.ListLogMetricsResponse"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v2/{parent=projects/*}/metrics\x12\x84\x01\n\x0cGetLogMetric\x12&.google.logging.v2.GetLogMetricRequest\x1a\x1c.google.logging.v2.LogMetric".\x82\xd3\xe4\x93\x02(\x12&/v2/{metric_name=projects/*/metrics/*}\x12\x8b\x01\n\x0f\x43reateLogMetric\x12).google.logging.v2.CreateLogMetricRequest\x1a\x1c.google.logging.v2.LogMetric"/\x82\xd3\xe4\x93\x02)"\x1f/v2/{parent=projects/*}/metrics:\x06metric\x12\x92\x01\n\x0fUpdateLogMetric\x12).google.logging.v2.UpdateLogMetricRequest\x1a\x1c.google.logging.v2.LogMetric"6\x82\xd3\xe4\x93\x02\x30\x1a&/v2/{metric_name=projects/*/metrics/*}:\x06metric\x12\x84\x01\n\x0f\x44\x65leteLogMetric\x12).google.logging.v2.DeleteLogMetricRequest\x1a\x16.google.protobuf.Empty".\x82\xd3\xe4\x93\x02(*&/v2/{metric_name=projects/*/metrics/*}\x1a\x8d\x02\xca\x41\x16logging.googleapis.com\xd2\x41\xf0\x01https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-only,https://www.googleapis.com/auth/logging.admin,https://www.googleapis.com/auth/logging.read,https://www.googleapis.com/auth/logging.writeB\x9f\x01\n\x15\x63om.google.logging.v2B\x13LoggingMetricsProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' ), dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, google_dot_api_dot_distribution__pb2.DESCRIPTOR, google_dot_api_dot_metric__pb2.DESCRIPTOR, google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, + google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, ], ) @@ -60,8 +61,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=662, - 
serialized_end=690, + serialized_start=785, + serialized_end=813, ) _sym_db.RegisterEnumDescriptor(_LOGMETRIC_APIVERSION) @@ -118,8 +119,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=606, - serialized_end=660, + serialized_start=729, + serialized_end=783, ) _LOGMETRIC = _descriptor.Descriptor( @@ -255,10 +256,46 @@ serialized_options=None, file=DESCRIPTOR, ), + _descriptor.FieldDescriptor( + name="create_time", + full_name="google.logging.v2.LogMetric.create_time", + index=7, + number=9, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="update_time", + full_name="google.logging.v2.LogMetric.update_time", + index=8, + number=10, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), _descriptor.FieldDescriptor( name="version", full_name="google.logging.v2.LogMetric.version", - index=7, + index=9, number=4, type=14, cpp_type=8, @@ -275,15 +312,15 @@ ), ], extensions=[], - nested_types=[_LOGMETRIC_LABELEXTRACTORSENTRY], - enum_types=[_LOGMETRIC_APIVERSION], + nested_types=[_LOGMETRIC_LABELEXTRACTORSENTRY,], + enum_types=[_LOGMETRIC_APIVERSION,], serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=257, - serialized_end=690, + serialized_start=282, + serialized_end=813, ) @@ -357,8 +394,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=692, - serialized_end=770, + serialized_start=815, + serialized_end=893, ) @@ -414,8 +451,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=772, - serialized_end=868, + 
serialized_start=895, + serialized_end=991, ) @@ -443,7 +480,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -453,8 +490,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=870, - serialized_end=912, + serialized_start=993, + serialized_end=1035, ) @@ -510,8 +547,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=914, - serialized_end=1000, + serialized_start=1037, + serialized_end=1123, ) @@ -567,8 +604,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1002, - serialized_end=1093, + serialized_start=1125, + serialized_end=1216, ) @@ -596,7 +633,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -606,8 +643,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1095, - serialized_end=1140, + serialized_start=1218, + serialized_end=1263, ) _LOGMETRIC_LABELEXTRACTORSENTRY.containing_type = _LOGMETRIC @@ -620,6 +657,12 @@ _LOGMETRIC.fields_by_name[ "bucket_options" ].message_type = google_dot_api_dot_distribution__pb2._DISTRIBUTION_BUCKETOPTIONS +_LOGMETRIC.fields_by_name[ + "create_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_LOGMETRIC.fields_by_name[ + "update_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP _LOGMETRIC.fields_by_name["version"].enum_type = _LOGMETRIC_APIVERSION _LOGMETRIC_APIVERSION.containing_type = _LOGMETRIC _LISTLOGMETRICSRESPONSE.fields_by_name["metrics"].message_type = _LOGMETRIC @@ -675,7 +718,8 @@ project/metrics/nginx%2Frequests"``. description: Optional. A description of this metric, which is used in - documentation. + documentation. The maximum length of the description is 8000 + characters. filter: Required. 
An `advanced logs filter `__ which is used to @@ -739,6 +783,12 @@ based metric is using a DISTRIBUTION value type and it describes the bucket boundaries used to create a histogram of the extracted values. + create_time: + Output only. The creation timestamp of the metric. This field + may not be present for older metrics. + update_time: + Output only. The last update timestamp of the metric. This + field may not be present for older metrics. version: Deprecated. The API version that created or updated this metric. The v2 format is used by default and cannot be @@ -898,9 +948,11 @@ full_name="google.logging.v2.MetricsServiceV2", file=DESCRIPTOR, index=0, - serialized_options=None, - serialized_start=1143, - serialized_end=1867, + serialized_options=_b( + "\312A\026logging.googleapis.com\322A\360\001https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-only,https://www.googleapis.com/auth/logging.admin,https://www.googleapis.com/auth/logging.read,https://www.googleapis.com/auth/logging.write" + ), + serialized_start=1266, + serialized_end=2262, methods=[ _descriptor.MethodDescriptor( name="ListLogMetrics", diff --git a/logging/google/cloud/logging_v2/proto/logging_pb2.py b/logging/google/cloud/logging_v2/proto/logging_pb2.py index 2438534d754b..79a73bd0f5fc 100644 --- a/logging/google/cloud/logging_v2/proto/logging_pb2.py +++ b/logging/google/cloud/logging_v2/proto/logging_pb2.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- -# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! 
# source: google/cloud/logging_v2/proto/logging.proto @@ -16,17 +15,21 @@ _sym_db = _symbol_database.Default() -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 from google.api import ( monitored_resource_pb2 as google_dot_api_dot_monitored__resource__pb2, ) from google.cloud.logging_v2.proto import ( log_entry_pb2 as google_dot_cloud_dot_logging__v2_dot_proto_dot_log__entry__pb2, ) +from google.cloud.logging_v2.proto import ( + logging_config_pb2 as google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2, +) from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2 +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 DESCRIPTOR = _descriptor.FileDescriptor( @@ -37,16 +40,18 @@ "\n\025com.google.logging.v2B\014LoggingProtoP\001Z8google.golang.org/genproto/googleapis/logging/v2;logging\370\001\001\252\002\027Google.Cloud.Logging.V2\312\002\027Google\\Cloud\\Logging\\V2" ), serialized_pb=_b( - '\n+google/cloud/logging_v2/proto/logging.proto\x12\x11google.logging.v2\x1a\x1cgoogle/api/annotations.proto\x1a#google/api/monitored_resource.proto\x1a-google/cloud/logging_v2/proto/log_entry.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto"$\n\x10\x44\x65leteLogRequest\x12\x10\n\x08log_name\x18\x01 \x01(\t"\xa9\x02\n\x16WriteLogEntriesRequest\x12\x10\n\x08log_name\x18\x01 \x01(\t\x12/\n\x08resource\x18\x02 \x01(\x0b\x32\x1d.google.api.MonitoredResource\x12\x45\n\x06labels\x18\x03 \x03(\x0b\x32\x35.google.logging.v2.WriteLogEntriesRequest.LabelsEntry\x12,\n\x07\x65ntries\x18\x04 
\x03(\x0b\x32\x1b.google.logging.v2.LogEntry\x12\x17\n\x0fpartial_success\x18\x05 \x01(\x08\x12\x0f\n\x07\x64ry_run\x18\x06 \x01(\x08\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x19\n\x17WriteLogEntriesResponse"\xc8\x01\n\x1cWriteLogEntriesPartialErrors\x12]\n\x10log_entry_errors\x18\x01 \x03(\x0b\x32\x43.google.logging.v2.WriteLogEntriesPartialErrors.LogEntryErrorsEntry\x1aI\n\x13LogEntryErrorsEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12!\n\x05value\x18\x02 \x01(\x0b\x32\x12.google.rpc.Status:\x02\x38\x01"\x91\x01\n\x15ListLogEntriesRequest\x12\x17\n\x0bproject_ids\x18\x01 \x03(\tB\x02\x18\x01\x12\x16\n\x0eresource_names\x18\x08 \x03(\t\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x10\n\x08order_by\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05\x12\x12\n\npage_token\x18\x05 \x01(\t"_\n\x16ListLogEntriesResponse\x12,\n\x07\x65ntries\x18\x01 \x03(\x0b\x32\x1b.google.logging.v2.LogEntry\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"P\n\'ListMonitoredResourceDescriptorsRequest\x12\x11\n\tpage_size\x18\x01 \x01(\x05\x12\x12\n\npage_token\x18\x02 \x01(\t"\x8a\x01\n(ListMonitoredResourceDescriptorsResponse\x12\x45\n\x14resource_descriptors\x18\x01 \x03(\x0b\x32\'.google.api.MonitoredResourceDescriptor\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"H\n\x0fListLogsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t">\n\x10ListLogsResponse\x12\x11\n\tlog_names\x18\x03 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t2\xd8\x07\n\x10LoggingServiceV2\x12\xeb\x01\n\tDeleteLog\x12#.google.logging.v2.DeleteLogRequest\x1a\x16.google.protobuf.Empty"\xa0\x01\x82\xd3\xe4\x93\x02\x99\x01* 
/v2/{log_name=projects/*/logs/*}Z\'*%/v2/{log_name=organizations/*/logs/*}Z!*\x1f/v2/{log_name=folders/*/logs/*}Z)*\'/v2/{log_name=billingAccounts/*/logs/*}\x12\x86\x01\n\x0fWriteLogEntries\x12).google.logging.v2.WriteLogEntriesRequest\x1a*.google.logging.v2.WriteLogEntriesResponse"\x1c\x82\xd3\xe4\x93\x02\x16"\x11/v2/entries:write:\x01*\x12\x82\x01\n\x0eListLogEntries\x12(.google.logging.v2.ListLogEntriesRequest\x1a).google.logging.v2.ListLogEntriesResponse"\x1b\x82\xd3\xe4\x93\x02\x15"\x10/v2/entries:list:\x01*\x12\xc5\x01\n ListMonitoredResourceDescriptors\x12:.google.logging.v2.ListMonitoredResourceDescriptorsRequest\x1a;.google.logging.v2.ListMonitoredResourceDescriptorsResponse"(\x82\xd3\xe4\x93\x02"\x12 /v2/monitoredResourceDescriptors\x12\xff\x01\n\x08ListLogs\x12".google.logging.v2.ListLogsRequest\x1a#.google.logging.v2.ListLogsResponse"\xa9\x01\x82\xd3\xe4\x93\x02\xa2\x01\x12\x15/v2/{parent=*/*}/logsZ\x1e\x12\x1c/v2/{parent=projects/*}/logsZ#\x12!/v2/{parent=organizations/*}/logsZ\x1d\x12\x1b/v2/{parent=folders/*}/logsZ%\x12#/v2/{parent=billingAccounts/*}/logsB\x98\x01\n\x15\x63om.google.logging.v2B\x0cLoggingProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' + '\n+google/cloud/logging_v2/proto/logging.proto\x12\x11google.logging.v2\x1a#google/api/monitored_resource.proto\x1a-google/cloud/logging_v2/proto/log_entry.proto\x1a\x32google/cloud/logging_v2/proto/logging_config.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto"$\n\x10\x44\x65leteLogRequest\x12\x10\n\x08log_name\x18\x01 \x01(\t"\xa9\x02\n\x16WriteLogEntriesRequest\x12\x10\n\x08log_name\x18\x01 \x01(\t\x12/\n\x08resource\x18\x02 \x01(\x0b\x32\x1d.google.api.MonitoredResource\x12\x45\n\x06labels\x18\x03 
\x03(\x0b\x32\x35.google.logging.v2.WriteLogEntriesRequest.LabelsEntry\x12,\n\x07\x65ntries\x18\x04 \x03(\x0b\x32\x1b.google.logging.v2.LogEntry\x12\x17\n\x0fpartial_success\x18\x05 \x01(\x08\x12\x0f\n\x07\x64ry_run\x18\x06 \x01(\x08\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x19\n\x17WriteLogEntriesResponse"\xc8\x01\n\x1cWriteLogEntriesPartialErrors\x12]\n\x10log_entry_errors\x18\x01 \x03(\x0b\x32\x43.google.logging.v2.WriteLogEntriesPartialErrors.LogEntryErrorsEntry\x1aI\n\x13LogEntryErrorsEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12!\n\x05value\x18\x02 \x01(\x0b\x32\x12.google.rpc.Status:\x02\x38\x01"\x91\x01\n\x15ListLogEntriesRequest\x12\x17\n\x0bproject_ids\x18\x01 \x03(\tB\x02\x18\x01\x12\x16\n\x0eresource_names\x18\x08 \x03(\t\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x10\n\x08order_by\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05\x12\x12\n\npage_token\x18\x05 \x01(\t"_\n\x16ListLogEntriesResponse\x12,\n\x07\x65ntries\x18\x01 \x03(\x0b\x32\x1b.google.logging.v2.LogEntry\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"P\n\'ListMonitoredResourceDescriptorsRequest\x12\x11\n\tpage_size\x18\x01 \x01(\x05\x12\x12\n\npage_token\x18\x02 \x01(\t"\x8a\x01\n(ListMonitoredResourceDescriptorsResponse\x12\x45\n\x14resource_descriptors\x18\x01 \x03(\x0b\x32\'.google.api.MonitoredResourceDescriptor\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"H\n\x0fListLogsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t">\n\x10ListLogsResponse\x12\x11\n\tlog_names\x18\x03 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t2\x85\n\n\x10LoggingServiceV2\x12\x88\x02\n\tDeleteLog\x12#.google.logging.v2.DeleteLogRequest\x1a\x16.google.protobuf.Empty"\xbd\x01\x82\xd3\xe4\x93\x02\xb6\x01* 
/v2/{log_name=projects/*/logs/*}Z\x1b*\x19/v2/{log_name=*/*/logs/*}Z\'*%/v2/{log_name=organizations/*/logs/*}Z!*\x1f/v2/{log_name=folders/*/logs/*}Z)*\'/v2/{log_name=billingAccounts/*/logs/*}\x12\x86\x01\n\x0fWriteLogEntries\x12).google.logging.v2.WriteLogEntriesRequest\x1a*.google.logging.v2.WriteLogEntriesResponse"\x1c\x82\xd3\xe4\x93\x02\x16"\x11/v2/entries:write:\x01*\x12\x82\x01\n\x0eListLogEntries\x12(.google.logging.v2.ListLogEntriesRequest\x1a).google.logging.v2.ListLogEntriesResponse"\x1b\x82\xd3\xe4\x93\x02\x15"\x10/v2/entries:list:\x01*\x12\xc5\x01\n ListMonitoredResourceDescriptors\x12:.google.logging.v2.ListMonitoredResourceDescriptorsRequest\x1a;.google.logging.v2.ListMonitoredResourceDescriptorsResponse"(\x82\xd3\xe4\x93\x02"\x12 /v2/monitoredResourceDescriptors\x12\xff\x01\n\x08ListLogs\x12".google.logging.v2.ListLogsRequest\x1a#.google.logging.v2.ListLogsResponse"\xa9\x01\x82\xd3\xe4\x93\x02\xa2\x01\x12\x15/v2/{parent=*/*}/logsZ\x1e\x12\x1c/v2/{parent=projects/*}/logsZ#\x12!/v2/{parent=organizations/*}/logsZ\x1d\x12\x1b/v2/{parent=folders/*}/logsZ%\x12#/v2/{parent=billingAccounts/*}/logs\x1a\x8d\x02\xca\x41\x16logging.googleapis.com\xd2\x41\xf0\x01https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-only,https://www.googleapis.com/auth/logging.admin,https://www.googleapis.com/auth/logging.read,https://www.googleapis.com/auth/logging.writeB\x98\x01\n\x15\x63om.google.logging.v2B\x0cLoggingProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' ), dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, google_dot_api_dot_monitored__resource__pb2.DESCRIPTOR, google_dot_cloud_dot_logging__v2_dot_proto_dot_log__entry__pb2.DESCRIPTOR, + google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.DESCRIPTOR, google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, 
google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, google_dot_rpc_dot_status__pb2.DESCRIPTOR, + google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, ], ) @@ -75,7 +80,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -85,8 +90,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=299, - serialized_end=335, + serialized_start=376, + serialized_end=412, ) @@ -142,8 +147,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=590, - serialized_end=635, + serialized_start=667, + serialized_end=712, ) _WRITELOGENTRIESREQUEST = _descriptor.Descriptor( @@ -263,15 +268,15 @@ ), ], extensions=[], - nested_types=[_WRITELOGENTRIESREQUEST_LABELSENTRY], + nested_types=[_WRITELOGENTRIESREQUEST_LABELSENTRY,], enum_types=[], serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=338, - serialized_end=635, + serialized_start=415, + serialized_end=712, ) @@ -290,8 +295,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=637, - serialized_end=662, + serialized_start=714, + serialized_end=739, ) @@ -347,8 +352,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=792, - serialized_end=865, + serialized_start=869, + serialized_end=942, ) _WRITELOGENTRIESPARTIALERRORS = _descriptor.Descriptor( @@ -375,18 +380,18 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], - nested_types=[_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY], + nested_types=[_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY,], enum_types=[], serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=665, - serialized_end=865, + serialized_start=742, + serialized_end=942, ) @@ -514,8 +519,8 @@ syntax="proto3", extension_ranges=[], 
oneofs=[], - serialized_start=868, - serialized_end=1013, + serialized_start=945, + serialized_end=1090, ) @@ -571,8 +576,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1015, - serialized_end=1110, + serialized_start=1092, + serialized_end=1187, ) @@ -628,8 +633,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1112, - serialized_end=1192, + serialized_start=1189, + serialized_end=1269, ) @@ -685,8 +690,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1195, - serialized_end=1333, + serialized_start=1272, + serialized_end=1410, ) @@ -760,8 +765,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1335, - serialized_end=1407, + serialized_start=1412, + serialized_end=1484, ) @@ -817,8 +822,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1409, - serialized_end=1471, + serialized_start=1486, + serialized_end=1548, ) _WRITELOGENTRIESREQUEST_LABELSENTRY.containing_type = _WRITELOGENTRIESREQUEST @@ -959,7 +964,7 @@ `logs retention period `__ in the past or more than 24 hours in the future will not be available when calling ``entries.list``. However, those log entries can still - be exported with `LogSinks `__. To improve throughput and to avoid exceeding the `quota limit `__ for calls to ``entries.write``, you should try to include several log @@ -1043,9 +1048,7 @@ project_ids: Deprecated. Use ``resource_names`` instead. One or more project identifiers or project numbers from which to retrieve - log entries. Example: ``"my-project-1A"``. If present, these - project identifiers are converted to resource name format and - added to the list of resources in ``resource_names``. + log entries. Example: ``"my-project-1A"``. resource_names: Required. Names of one or more parent resources from which to retrieve log entries: :: "projects/[PROJECT_ID]" @@ -1215,8 +1218,8 @@ Attributes: log_names: A list of log names. 
For example, ``"projects/my- - project/syslog"`` or ``"organizations/123/cloudresourcemanager - .googleapis.com%2Factivity"``. + project/logs/syslog"`` or ``"organizations/123/logs/cloudresou + rcemanager.googleapis.com%2Factivity"``. next_page_token: If there might be more results than those appearing in this response, then ``nextPageToken`` is included. To get the next @@ -1239,9 +1242,11 @@ full_name="google.logging.v2.LoggingServiceV2", file=DESCRIPTOR, index=0, - serialized_options=None, - serialized_start=1474, - serialized_end=2458, + serialized_options=_b( + "\312A\026logging.googleapis.com\322A\360\001https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-only,https://www.googleapis.com/auth/logging.admin,https://www.googleapis.com/auth/logging.read,https://www.googleapis.com/auth/logging.write" + ), + serialized_start=1551, + serialized_end=2836, methods=[ _descriptor.MethodDescriptor( name="DeleteLog", @@ -1251,7 +1256,7 @@ input_type=_DELETELOGREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, serialized_options=_b( - "\202\323\344\223\002\231\001* /v2/{log_name=projects/*/logs/*}Z'*%/v2/{log_name=organizations/*/logs/*}Z!*\037/v2/{log_name=folders/*/logs/*}Z)*'/v2/{log_name=billingAccounts/*/logs/*}" + "\202\323\344\223\002\266\001* /v2/{log_name=projects/*/logs/*}Z\033*\031/v2/{log_name=*/*/logs/*}Z'*%/v2/{log_name=organizations/*/logs/*}Z!*\037/v2/{log_name=folders/*/logs/*}Z)*'/v2/{log_name=billingAccounts/*/logs/*}" ), ), _descriptor.MethodDescriptor( diff --git a/logging/google/cloud/logging_v2/proto/logging_pb2_grpc.py b/logging/google/cloud/logging_v2/proto/logging_pb2_grpc.py index d67dd2c95fd0..2a2b3656925c 100644 --- a/logging/google/cloud/logging_v2/proto/logging_pb2_grpc.py +++ b/logging/google/cloud/logging_v2/proto/logging_pb2_grpc.py @@ -72,9 +72,9 @@ def WriteLogEntries(self, request, context): raise NotImplementedError("Method not implemented!") def ListLogEntries(self, request, 
context): - """Lists log entries. Use this method to retrieve log entries from - Logging. For ways to export log entries, see - [Exporting Logs](/logging/docs/export). + """Lists log entries. Use this method to retrieve log entries that originated + from a project/folder/organization/billing account. For ways to export log + entries, see [Exporting Logs](/logging/docs/export). """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") diff --git a/logging/setup.py b/logging/setup.py index 6d3da8a110f9..00c9cb388a12 100644 --- a/logging/setup.py +++ b/logging/setup.py @@ -22,15 +22,15 @@ name = 'google-cloud-logging' description = 'Stackdriver Logging API client library' -version = '1.12.1' +version = '1.14.0' # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' # 'Development Status :: 5 - Production/Stable' release_status = 'Development Status :: 5 - Production/Stable' dependencies = [ - 'google-api-core[grpc] >= 1.14.0, < 2.0.0dev', - "google-cloud-core >= 1.0.0, < 2.0dev", + "google-api-core[grpc] >= 1.14.0, < 2.0.0dev", + "google-cloud-core >= 1.0.3, < 2.0dev", ] extras = { } diff --git a/logging/synth.metadata b/logging/synth.metadata index 8d3384c18458..44367f98f48d 100644 --- a/logging/synth.metadata +++ b/logging/synth.metadata @@ -1,26 +1,26 @@ { - "updateTime": "2019-08-05T19:11:10.264038Z", + "updateTime": "2019-10-29T12:31:03.926658Z", "sources": [ { "generator": { "name": "artman", - "version": "0.32.1", - "dockerImage": "googleapis/artman@sha256:a684d40ba9a4e15946f5f2ca6b4bd9fe301192f522e9de4fff622118775f309b" + "version": "0.40.3", + "dockerImage": "googleapis/artman@sha256:c805f50525f5f557886c94ab76f56eaa09cb1da58c3ee95111fd34259376621a" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "f94d89f4b75b4bdf6a254da44eb0f70d34bcca14", - "internalRef": "261701508" + "sha": "532773acbed8d09451dafb3d403ab1823e6a6e1e", + 
"internalRef": "277177415" } }, { "template": { "name": "python_library", "origin": "synthtool.gcp", - "version": "2019.5.2" + "version": "2019.10.17" } } ], diff --git a/logging/synth.py b/logging/synth.py index c24382ce4d72..4364f387b4c9 100644 --- a/logging/synth.py +++ b/logging/synth.py @@ -30,13 +30,13 @@ include_protos=True, ) -s.move(library / "google/cloud/logging_v2/proto") +# the structure of the logging directory is a bit different, so manually copy the protos +s.move(library / "google/cloud/logging_v2/proto/cloud/logging_v2/proto", "google/cloud/logging_v2/proto") +s.move(library / "google/cloud/logging_v2/proto/*.proto") + s.move(library / "google/cloud/logging_v2/gapic") s.move(library / "tests/unit/gapic/v2") - -# Issues exist where python files should define the source encoding -# https://github.com/googleapis/gapic-generator/issues/2097 -s.replace("google/**/proto/*_pb2.py", r"(^.*$\n)*", r"# -*- coding: utf-8 -*-\n\g<0>") +s.move(library / "docs/gapic/v2") # ---------------------------------------------------------------------------- # Add templated files diff --git a/logging/tests/unit/handlers/transports/test_background_thread.py b/logging/tests/unit/handlers/transports/test_background_thread.py index 7de912560aa7..7edae8a7bfa0 100644 --- a/logging/tests/unit/handlers/transports/test_background_thread.py +++ b/logging/tests/unit/handlers/transports/test_background_thread.py @@ -43,7 +43,7 @@ def test_constructor(self): transport, worker = self._make_one(client, name) - logger, = worker.call_args[0] # call_args[0] is *args. + (logger,) = worker.call_args[0] # call_args[0] is *args. 
self.assertEqual(logger.name, name) def test_send(self): diff --git a/logging/tests/unit/test_client.py b/logging/tests/unit/test_client.py index 5acd736185fc..4e0b5ca22f0d 100644 --- a/logging/tests/unit/test_client.py +++ b/logging/tests/unit/test_client.py @@ -676,7 +676,7 @@ def test_setup_logging(self): self.assertEqual(len(mocked.mock_calls), 1) _, args, kwargs = mocked.mock_calls[0] - handler, = args + (handler,) = args self.assertIsInstance(handler, CloudLoggingHandler) handler.transport.worker.stop() @@ -710,7 +710,7 @@ def test_setup_logging_w_extra_kwargs(self): self.assertEqual(len(mocked.mock_calls), 1) _, args, kwargs = mocked.mock_calls[0] - handler, = args + (handler,) = args self.assertIsInstance(handler, CloudLoggingHandler) self.assertEqual(handler.name, name) self.assertEqual(handler.resource, resource) diff --git a/monitoring/docs/conf.py b/monitoring/docs/conf.py index 1827382440ff..3b9609a804e0 100644 --- a/monitoring/docs/conf.py +++ b/monitoring/docs/conf.py @@ -344,7 +344,7 @@ "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://2.python-requests.org/en/master/", None), + "requests": ("https://requests.kennethreitz.org/en/stable/", None), "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } diff --git a/monitoring/noxfile.py b/monitoring/noxfile.py index 63a11673d3eb..4d766d958892 100644 --- a/monitoring/noxfile.py +++ b/monitoring/noxfile.py @@ -122,7 +122,8 @@ def system(session): session.install("-e", ".") # Additional setup for VPCSC system tests - if os.environ.get("GOOGLE_CLOUD_TESTS_IN_VPCSC") != "true": + in_vpc = os.environ.get("GOOGLE_CLOUD_TESTS_IN_VPCSC", "false") + if in_vpc.lower() != "true": # Unset PROJECT_ID, since VPCSC system tests expect this to be a project # within the 
VPCSC perimeter. env = { diff --git a/monitoring/tests/system/test_vpcsc.py b/monitoring/tests/system/test_vpcsc_v3.py similarity index 71% rename from monitoring/tests/system/test_vpcsc.py rename to monitoring/tests/system/test_vpcsc_v3.py index ffc78cbfc047..6ef514fa2891 100644 --- a/monitoring/tests/system/test_vpcsc.py +++ b/monitoring/tests/system/test_vpcsc_v3.py @@ -1,12 +1,17 @@ # -*- coding: utf-8 -*- # +# flake8: noqa +# +# DO NOT MODIFY! THIS FILE IS AUTO-GENERATED. +# This file is auto-generated on 11 Oct 19 21:43 UTC. + # Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# https://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -14,22 +19,14 @@ # See the License for the specific language governing permissions and # limitations under the License. - -# DO NOT MODIFY! AUTO-GENERATED! -# This file is auto-generated on 2019-05-03. - -# flake8: noqa - import os import pytest - +import logging from google.api_core import exceptions from google.cloud import monitoring_v3 from google.cloud.monitoring_v3 import enums PROJECT_INSIDE = os.environ.get("PROJECT_ID", None) -if not PROJECT_INSIDE: - PROJECT_INSIDE = None PROJECT_OUTSIDE = os.environ.get( "GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", None ) @@ -39,6 +36,7 @@ class TestVPCServiceControlV3(object): @staticmethod def _is_rejected(call): + logger = logging.getLogger(__name__) try: responses = call() @@ -47,8 +45,10 @@ def _is_rejected(call): # instance, or None. 
list(responses) except exceptions.PermissionDenied as e: + logger.debug(e) return e.message == "Request is prohibited by organization's policy" - except: + except Exception as e: + logger.debug(e) pass return False @@ -62,10 +62,10 @@ def _do_test(delayed_inside, delayed_outside): assert TestVPCServiceControlV3._is_rejected(delayed_inside) @pytest.mark.skipif( - PROJECT_INSIDE is None, reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_create_alert_policy(self): @@ -74,13 +74,13 @@ def test_create_alert_policy(self): delayed_inside = lambda: client.create_alert_policy(name_inside, {}) name_outside = client.project_path(PROJECT_OUTSIDE) delayed_outside = lambda: client.create_alert_policy(name_outside, {}) - TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( - PROJECT_INSIDE is None, reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_delete_alert_policy(self): @@ -89,13 +89,13 @@ def test_delete_alert_policy(self): delayed_inside = lambda: client.delete_alert_policy(name_inside) name_outside = client.alert_policy_path(PROJECT_OUTSIDE, "mock_alert_policy") delayed_outside = lambda: client.delete_alert_policy(name_outside) - TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( - PROJECT_INSIDE is None, reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, reason="Missing environment variable: 
PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_get_alert_policy(self): @@ -104,13 +104,13 @@ def test_get_alert_policy(self): delayed_inside = lambda: client.get_alert_policy(name_inside) name_outside = client.alert_policy_path(PROJECT_OUTSIDE, "mock_alert_policy") delayed_outside = lambda: client.get_alert_policy(name_outside) - TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( - PROJECT_INSIDE is None, reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_list_alert_policies(self): @@ -119,13 +119,13 @@ def test_list_alert_policies(self): delayed_inside = lambda: client.list_alert_policies(name_inside) name_outside = client.project_path(PROJECT_OUTSIDE) delayed_outside = lambda: client.list_alert_policies(name_outside) - TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( - PROJECT_INSIDE is None, reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_update_alert_policy(self): @@ -134,13 +134,13 @@ def test_update_alert_policy(self): delayed_inside = lambda: client.update_alert_policy({"name": name_inside}) name_outside = client.alert_policy_path(PROJECT_OUTSIDE, "mock_alert_policy") delayed_outside = lambda: client.update_alert_policy({"name": name_outside}) - 
TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( - PROJECT_INSIDE is None, reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_create_group(self): @@ -149,13 +149,13 @@ def test_create_group(self): delayed_inside = lambda: client.create_group(name_inside, {}) name_outside = client.project_path(PROJECT_OUTSIDE) delayed_outside = lambda: client.create_group(name_outside, {}) - TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( - PROJECT_INSIDE is None, reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_delete_group(self): @@ -164,13 +164,13 @@ def test_delete_group(self): delayed_inside = lambda: client.delete_group(name_inside) name_outside = client.group_path(PROJECT_OUTSIDE, "mock_group") delayed_outside = lambda: client.delete_group(name_outside) - TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( - PROJECT_INSIDE is None, reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_get_group(self): @@ -179,28 +179,28 @@ def test_get_group(self): delayed_inside = lambda: 
client.get_group(name_inside) name_outside = client.group_path(PROJECT_OUTSIDE, "mock_group") delayed_outside = lambda: client.get_group(name_outside) - TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( - PROJECT_INSIDE is None, reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_list_group_members(self): client = monitoring_v3.GroupServiceClient() - name_inside = client.project_path(PROJECT_INSIDE) + name_inside = client.group_path(PROJECT_INSIDE, "mock_group") delayed_inside = lambda: client.list_group_members(name_inside) - name_outside = client.project_path(PROJECT_OUTSIDE) + name_outside = client.group_path(PROJECT_OUTSIDE, "mock_group") delayed_outside = lambda: client.list_group_members(name_outside) - TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( - PROJECT_INSIDE is None, reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_list_groups(self): @@ -209,13 +209,13 @@ def test_list_groups(self): delayed_inside = lambda: client.list_groups(name_inside) name_outside = client.project_path(PROJECT_OUTSIDE) delayed_outside = lambda: client.list_groups(name_outside) - TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( - PROJECT_INSIDE is None, reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, reason="Missing 
environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_update_group(self): @@ -224,13 +224,13 @@ def test_update_group(self): delayed_inside = lambda: client.update_group({"name": name_inside}) name_outside = client.group_path(PROJECT_OUTSIDE, "mock_group") delayed_outside = lambda: client.update_group({"name": name_outside}) - TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( - PROJECT_INSIDE is None, reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_create_metric_descriptor(self): @@ -239,13 +239,13 @@ def test_create_metric_descriptor(self): delayed_inside = lambda: client.create_metric_descriptor(name_inside, {}) name_outside = client.project_path(PROJECT_OUTSIDE) delayed_outside = lambda: client.create_metric_descriptor(name_outside, {}) - TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( - PROJECT_INSIDE is None, reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_create_time_series(self): @@ -254,13 +254,13 @@ def test_create_time_series(self): delayed_inside = lambda: client.create_time_series(name_inside, {}) name_outside = client.project_path(PROJECT_OUTSIDE) delayed_outside = lambda: client.create_time_series(name_outside, {}) - 
TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( - PROJECT_INSIDE is None, reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_delete_metric_descriptor(self): @@ -273,13 +273,13 @@ def test_delete_metric_descriptor(self): PROJECT_OUTSIDE, "mock_metric_descriptor" ) delayed_outside = lambda: client.delete_metric_descriptor(name_outside) - TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( - PROJECT_INSIDE is None, reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_get_metric_descriptor(self): @@ -292,13 +292,13 @@ def test_get_metric_descriptor(self): PROJECT_OUTSIDE, "mock_metric_descriptor" ) delayed_outside = lambda: client.get_metric_descriptor(name_outside) - TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( - PROJECT_INSIDE is None, reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_get_monitored_resource_descriptor(self): @@ -311,13 +311,13 @@ def test_get_monitored_resource_descriptor(self): PROJECT_OUTSIDE, "mock_monitored_resource_descriptor" ) delayed_outside = lambda: 
client.get_monitored_resource_descriptor(name_outside) - TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( - PROJECT_INSIDE is None, reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_list_metric_descriptors(self): @@ -326,13 +326,13 @@ def test_list_metric_descriptors(self): delayed_inside = lambda: client.list_metric_descriptors(name_inside) name_outside = client.project_path(PROJECT_OUTSIDE) delayed_outside = lambda: client.list_metric_descriptors(name_outside) - TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( - PROJECT_INSIDE is None, reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_list_monitored_resource_descriptors(self): @@ -343,13 +343,13 @@ def test_list_monitored_resource_descriptors(self): delayed_outside = lambda: client.list_monitored_resource_descriptors( name_outside ) - TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( - PROJECT_INSIDE is None, reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_list_time_series(self): @@ -362,13 +362,13 @@ def 
test_list_time_series(self): delayed_outside = lambda: client.list_time_series( name_outside, "", {}, enums.ListTimeSeriesRequest.TimeSeriesView.FULL ) - TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( - PROJECT_INSIDE is None, reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_create_notification_channel(self): @@ -377,13 +377,13 @@ def test_create_notification_channel(self): delayed_inside = lambda: client.create_notification_channel(name_inside, {}) name_outside = client.project_path(PROJECT_OUTSIDE) delayed_outside = lambda: client.create_notification_channel(name_outside, {}) - TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( - PROJECT_INSIDE is None, reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_delete_notification_channel(self): @@ -396,13 +396,13 @@ def test_delete_notification_channel(self): PROJECT_OUTSIDE, "mock_notification_channel" ) delayed_outside = lambda: client.delete_notification_channel(name_outside) - TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( - PROJECT_INSIDE is None, reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, reason="Missing environment 
variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_get_notification_channel(self): @@ -415,13 +415,13 @@ def test_get_notification_channel(self): PROJECT_OUTSIDE, "mock_notification_channel" ) delayed_outside = lambda: client.get_notification_channel(name_outside) - TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( - PROJECT_INSIDE is None, reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_get_notification_channel_descriptor(self): @@ -436,13 +436,36 @@ def test_get_notification_channel_descriptor(self): delayed_outside = lambda: client.get_notification_channel_descriptor( name_outside ) - TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) + + @pytest.mark.skipif( + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" + ) + @pytest.mark.skipif( + not PROJECT_OUTSIDE, + reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", + ) + def test_get_notification_channel_verification_code(self): + client = monitoring_v3.NotificationChannelServiceClient() + name_inside = client.notification_channel_path( + PROJECT_INSIDE, "mock_notification_channel" + ) + delayed_inside = lambda: client.get_notification_channel_verification_code( + name_inside + ) + name_outside = client.notification_channel_path( + PROJECT_OUTSIDE, "mock_notification_channel" + ) + delayed_outside = lambda: client.get_notification_channel_verification_code( + name_outside + ) + self._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( - PROJECT_INSIDE is None, reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, 
reason="Missing environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_list_notification_channel_descriptors(self): @@ -455,13 +478,13 @@ def test_list_notification_channel_descriptors(self): delayed_outside = lambda: client.list_notification_channel_descriptors( name_outside ) - TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( - PROJECT_INSIDE is None, reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_list_notification_channels(self): @@ -470,13 +493,36 @@ def test_list_notification_channels(self): delayed_inside = lambda: client.list_notification_channels(name_inside) name_outside = client.project_path(PROJECT_OUTSIDE) delayed_outside = lambda: client.list_notification_channels(name_outside) - TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( - PROJECT_INSIDE is None, reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, + reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", + ) + def test_send_notification_channel_verification_code(self): + client = monitoring_v3.NotificationChannelServiceClient() + name_inside = client.notification_channel_path( + PROJECT_INSIDE, "mock_notification_channel" + ) + delayed_inside = lambda: client.send_notification_channel_verification_code( + name_inside + ) + name_outside = 
client.notification_channel_path( + PROJECT_OUTSIDE, "mock_notification_channel" + ) + delayed_outside = lambda: client.send_notification_channel_verification_code( + name_outside + ) + self._do_test(delayed_inside, delayed_outside) + + @pytest.mark.skipif( + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" + ) + @pytest.mark.skipif( + not PROJECT_OUTSIDE, reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_update_notification_channel(self): @@ -493,13 +539,32 @@ def test_update_notification_channel(self): delayed_outside = lambda: client.update_notification_channel( {"name": name_outside} ) - TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) + + @pytest.mark.skipif( + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" + ) + @pytest.mark.skipif( + not PROJECT_OUTSIDE, + reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", + ) + def test_verify_notification_channel(self): + client = monitoring_v3.NotificationChannelServiceClient() + name_inside = client.notification_channel_path( + PROJECT_INSIDE, "mock_notification_channel" + ) + delayed_inside = lambda: client.verify_notification_channel(name_inside, "") + name_outside = client.notification_channel_path( + PROJECT_OUTSIDE, "mock_notification_channel" + ) + delayed_outside = lambda: client.verify_notification_channel(name_outside, "") + self._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( - PROJECT_INSIDE is None, reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_create_uptime_check_config(self): @@ -508,13 +573,13 @@ def test_create_uptime_check_config(self): 
delayed_inside = lambda: client.create_uptime_check_config(name_inside, {}) name_outside = client.project_path(PROJECT_OUTSIDE) delayed_outside = lambda: client.create_uptime_check_config(name_outside, {}) - TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( - PROJECT_INSIDE is None, reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_delete_uptime_check_config(self): @@ -527,13 +592,13 @@ def test_delete_uptime_check_config(self): PROJECT_OUTSIDE, "mock_uptime_check_config" ) delayed_outside = lambda: client.delete_uptime_check_config(name_outside) - TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( - PROJECT_INSIDE is None, reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_get_uptime_check_config(self): @@ -546,13 +611,13 @@ def test_get_uptime_check_config(self): PROJECT_OUTSIDE, "mock_uptime_check_config" ) delayed_outside = lambda: client.get_uptime_check_config(name_outside) - TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( - PROJECT_INSIDE is None, reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, reason="Missing environment variable: 
GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_list_uptime_check_configs(self): @@ -561,13 +626,13 @@ def test_list_uptime_check_configs(self): delayed_inside = lambda: client.list_uptime_check_configs(name_inside) name_outside = client.project_path(PROJECT_OUTSIDE) delayed_outside = lambda: client.list_uptime_check_configs(name_outside) - TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( - PROJECT_INSIDE is None, reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_update_uptime_check_config(self): @@ -584,4 +649,4 @@ def test_update_uptime_check_config(self): delayed_outside = lambda: client.update_uptime_check_config( {"name": name_outside} ) - TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) diff --git a/noxfile.py b/noxfile.py deleted file mode 100644 index 573360929b63..000000000000 --- a/noxfile.py +++ /dev/null @@ -1,39 +0,0 @@ -# Copyright 2017 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import os - -import nox - - -@nox.session(python='3.6') -def docs(session): - """Build the docs.""" - - # Install Sphinx and also all of the google-cloud-* packages. - session.chdir(os.path.realpath(os.path.dirname(__file__))) - session.install('-r', os.path.join('docs', 'requirements.txt')) - - # Build the docs! - session.run( - 'bash', os.path.join('.', 'test_utils', 'scripts', 'update_docs.sh')) - - -@nox.session(python='3.6') -def lint_setup_py(session): - """Verify that setup.py is valid (including RST check).""" - - session.install('docutils', 'Pygments') - session.run( - 'python', 'legacy/google-cloud/setup.py', 'check', '--restructuredtext', '--strict') diff --git a/oslogin/docs/conf.py b/oslogin/docs/conf.py index e12461cf03fb..b36144820e0c 100644 --- a/oslogin/docs/conf.py +++ b/oslogin/docs/conf.py @@ -45,6 +45,7 @@ autodoc_default_flags = ["members"] autosummary_generate = True + # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] @@ -121,6 +122,7 @@ # If true, `todo` and `todoList` produce output, else they produce nothing. todo_include_todos = True + # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for @@ -229,6 +231,7 @@ # -- Options for warnings ------------------------------------------------------ + suppress_warnings = [ # Temporarily suppress this to avoid "more than one target found for # cross-reference" warning, which are intractable for us to avoid while in @@ -284,6 +287,7 @@ # If false, no module index is generated. # latex_domain_indices = True + # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples @@ -301,6 +305,7 @@ # If true, show URL addresses after external links. # man_show_urls = False + # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. 
List of tuples @@ -330,6 +335,7 @@ # If true, do not generate a @detailmenu in the "Top" node's menu. # texinfo_no_detailmenu = False + # Example configuration for intersphinx: refer to the Python standard library. intersphinx_mapping = { "python": ("http://python.readthedocs.org/en/latest/", None), @@ -338,11 +344,12 @@ "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://2.python-requests.org/en/master/", None), + "requests": ("https://requests.kennethreitz.org/en/master/", None), "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } + # Napoleon settings napoleon_google_docstring = True napoleon_numpy_docstring = True diff --git a/oslogin/docs/gapic/v1/api.rst b/oslogin/docs/gapic/v1/api.rst index 29e3db95c426..25108a16994c 100644 --- a/oslogin/docs/gapic/v1/api.rst +++ b/oslogin/docs/gapic/v1/api.rst @@ -1,5 +1,5 @@ -Client for Google Cloud OS Login API -==================================== +Client for Cloud OS Login API +============================= .. automodule:: google.cloud.oslogin_v1 :members: diff --git a/oslogin/docs/gapic/v1/types.rst b/oslogin/docs/gapic/v1/types.rst index 25d3fd0f8a94..9694a7e3719f 100644 --- a/oslogin/docs/gapic/v1/types.rst +++ b/oslogin/docs/gapic/v1/types.rst @@ -1,5 +1,5 @@ -Types for Google Cloud OS Login API Client -========================================== +Types for Cloud OS Login API Client +=================================== .. 
automodule:: google.cloud.oslogin_v1.types :members: \ No newline at end of file diff --git a/oslogin/google/__init__.py b/oslogin/google/__init__.py index dd3a9f485275..8fcc60e2b9c6 100644 --- a/oslogin/google/__init__.py +++ b/oslogin/google/__init__.py @@ -1,4 +1,6 @@ -# Copyright 2018 Google LLC +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/oslogin/google/cloud/__init__.py b/oslogin/google/cloud/__init__.py index dd3a9f485275..8fcc60e2b9c6 100644 --- a/oslogin/google/cloud/__init__.py +++ b/oslogin/google/cloud/__init__.py @@ -1,4 +1,6 @@ -# Copyright 2018 Google LLC +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/oslogin/google/cloud/oslogin.py b/oslogin/google/cloud/oslogin.py index 3c44a48fc35c..416aad010a9b 100644 --- a/oslogin/google/cloud/oslogin.py +++ b/oslogin/google/cloud/oslogin.py @@ -1,4 +1,6 @@ -# Copyright 2018 Google LLC +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -12,9 +14,12 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+ from __future__ import absolute_import from google.cloud.oslogin_v1 import OsLoginServiceClient +from google.cloud.oslogin_v1 import enums from google.cloud.oslogin_v1 import types -__all__ = ("types", "OsLoginServiceClient") + +__all__ = ("enums", "types", "OsLoginServiceClient") diff --git a/oslogin/google/cloud/oslogin_v1/__init__.py b/oslogin/google/cloud/oslogin_v1/__init__.py index 2691d28a9b40..83a5ac263537 100644 --- a/oslogin/google/cloud/oslogin_v1/__init__.py +++ b/oslogin/google/cloud/oslogin_v1/__init__.py @@ -18,11 +18,13 @@ from __future__ import absolute_import from google.cloud.oslogin_v1 import types +from google.cloud.oslogin_v1.gapic import enums from google.cloud.oslogin_v1.gapic import os_login_service_client class OsLoginServiceClient(os_login_service_client.OsLoginServiceClient): __doc__ = os_login_service_client.OsLoginServiceClient.__doc__ + enums = enums -__all__ = ("types", "OsLoginServiceClient") +__all__ = ("enums", "types", "OsLoginServiceClient") diff --git a/oslogin/google/cloud/oslogin_v1/gapic/enums.py b/oslogin/google/cloud/oslogin_v1/gapic/enums.py new file mode 100644 index 000000000000..ab555b005071 --- /dev/null +++ b/oslogin/google/cloud/oslogin_v1/gapic/enums.py @@ -0,0 +1,35 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Wrappers for protocol buffer enum types.""" + +import enum + + +class OperatingSystemType(enum.IntEnum): + """ + The operating system options for account entries. + + Attributes: + OPERATING_SYSTEM_TYPE_UNSPECIFIED (int): The operating system type associated with the user account information is + unspecified. + LINUX (int): Linux user account information. + WINDOWS (int): Windows user account information. + """ + + OPERATING_SYSTEM_TYPE_UNSPECIFIED = 0 + LINUX = 1 + WINDOWS = 2 diff --git a/oslogin/google/cloud/oslogin_v1/gapic/os_login_service_client.py b/oslogin/google/cloud/oslogin_v1/gapic/os_login_service_client.py index d19ee1de971a..ec496c53a918 100644 --- a/oslogin/google/cloud/oslogin_v1/gapic/os_login_service_client.py +++ b/oslogin/google/cloud/oslogin_v1/gapic/os_login_service_client.py @@ -29,6 +29,7 @@ import google.api_core.path_template import grpc +from google.cloud.oslogin_v1.gapic import enums from google.cloud.oslogin_v1.gapic import os_login_service_client_config from google.cloud.oslogin_v1.gapic.transports import os_login_service_grpc_transport from google.cloud.oslogin_v1.proto import common_pb2 @@ -77,19 +78,19 @@ def from_service_account_file(cls, filename, *args, **kwargs): from_service_account_json = from_service_account_file @classmethod - def fingerprint_path(cls, user, fingerprint): - """Return a fully-qualified fingerprint string.""" + def posix_account_path(cls, user, project): + """Return a fully-qualified posix_account string.""" return google.api_core.path_template.expand( - "users/{user}/sshPublicKeys/{fingerprint}", - user=user, - fingerprint=fingerprint, + "users/{user}/projects/{project}", user=user, project=project ) @classmethod - def project_path(cls, user, project): - """Return a fully-qualified project string.""" + def ssh_public_key_path(cls, user, fingerprint): + """Return a fully-qualified ssh_public_key string.""" return google.api_core.path_template.expand( - "users/{user}/projects/{project}", user=user, 
project=project + "users/{user}/sshPublicKeys/{fingerprint}", + user=user, + fingerprint=fingerprint, ) @classmethod @@ -225,12 +226,12 @@ def delete_posix_account( >>> >>> client = oslogin_v1.OsLoginServiceClient() >>> - >>> name = client.project_path('[USER]', '[PROJECT]') + >>> name = client.posix_account_path('[USER]', '[PROJECT]') >>> >>> client.delete_posix_account(name) Args: - name (str): A reference to the POSIX account to update. POSIX accounts are + name (str): Required. A reference to the POSIX account to update. POSIX accounts are identified by the project ID they are associated with. A reference to the POSIX account is in format ``users/{user}/projects/{project}``. retry (Optional[google.api_core.retry.Retry]): A retry object used @@ -293,14 +294,14 @@ def delete_ssh_public_key( >>> >>> client = oslogin_v1.OsLoginServiceClient() >>> - >>> name = client.fingerprint_path('[USER]', '[FINGERPRINT]') + >>> name = client.ssh_public_key_path('[USER]', '[FINGERPRINT]') >>> >>> client.delete_ssh_public_key(name) Args: - name (str): The fingerprint of the public key to update. Public keys are identified - by their SHA-256 fingerprint. The fingerprint of the public key is in - format ``users/{user}/sshPublicKeys/{fingerprint}``. + name (str): Required. The fingerprint of the public key to update. Public keys are + identified by their SHA-256 fingerprint. The fingerprint of the public + key is in format ``users/{user}/sshPublicKeys/{fingerprint}``. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. 
@@ -349,6 +350,8 @@ def delete_ssh_public_key( def get_login_profile( self, name, + project_id=None, + system_id=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, @@ -367,7 +370,9 @@ def get_login_profile( >>> response = client.get_login_profile(name) Args: - name (str): The unique ID for the user in format ``users/{user}``. + name (str): Required. The unique ID for the user in format ``users/{user}``. + project_id (str): The project ID of the Google Cloud Platform project. + system_id (str): A system ID for filtering the results of the request. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. @@ -398,7 +403,9 @@ def get_login_profile( client_info=self._client_info, ) - request = oslogin_pb2.GetLoginProfileRequest(name=name) + request = oslogin_pb2.GetLoginProfileRequest( + name=name, project_id=project_id, system_id=system_id + ) if metadata is None: metadata = [] metadata = list(metadata) @@ -431,12 +438,12 @@ def get_ssh_public_key( >>> >>> client = oslogin_v1.OsLoginServiceClient() >>> - >>> name = client.fingerprint_path('[USER]', '[FINGERPRINT]') + >>> name = client.ssh_public_key_path('[USER]', '[FINGERPRINT]') >>> >>> response = client.get_ssh_public_key(name) Args: - name (str): The fingerprint of the public key to retrieve. Public keys are + name (str): Required. The fingerprint of the public key to retrieve. Public keys are identified by their SHA-256 fingerprint. The fingerprint of the public key is in format ``users/{user}/sshPublicKeys/{fingerprint}``. 
retry (Optional[google.api_core.retry.Retry]): A retry object used @@ -490,7 +497,7 @@ def get_ssh_public_key( def import_ssh_public_key( self, parent, - ssh_public_key, + ssh_public_key=None, project_id=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, @@ -508,14 +515,11 @@ def import_ssh_public_key( >>> >>> parent = client.user_path('[USER]') >>> - >>> # TODO: Initialize `ssh_public_key`: - >>> ssh_public_key = {} - >>> - >>> response = client.import_ssh_public_key(parent, ssh_public_key) + >>> response = client.import_ssh_public_key(parent) Args: - parent (str): The unique ID for the user in format ``users/{user}``. - ssh_public_key (Union[dict, ~google.cloud.oslogin_v1.types.SshPublicKey]): The SSH public key and expiration time. + parent (str): Required. The unique ID for the user in format ``users/{user}``. + ssh_public_key (Union[dict, ~google.cloud.oslogin_v1.types.SshPublicKey]): Optional. The SSH public key and expiration time. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.oslogin_v1.types.SshPublicKey` @@ -588,7 +592,7 @@ def update_ssh_public_key( >>> >>> client = oslogin_v1.OsLoginServiceClient() >>> - >>> name = client.fingerprint_path('[USER]', '[FINGERPRINT]') + >>> name = client.ssh_public_key_path('[USER]', '[FINGERPRINT]') >>> >>> # TODO: Initialize `ssh_public_key`: >>> ssh_public_key = {} @@ -596,10 +600,10 @@ def update_ssh_public_key( >>> response = client.update_ssh_public_key(name, ssh_public_key) Args: - name (str): The fingerprint of the public key to update. Public keys are identified - by their SHA-256 fingerprint. The fingerprint of the public key is in - format ``users/{user}/sshPublicKeys/{fingerprint}``. - ssh_public_key (Union[dict, ~google.cloud.oslogin_v1.types.SshPublicKey]): The SSH public key and expiration time. + name (str): Required. The fingerprint of the public key to update. 
Public keys are + identified by their SHA-256 fingerprint. The fingerprint of the public + key is in format ``users/{user}/sshPublicKeys/{fingerprint}``. + ssh_public_key (Union[dict, ~google.cloud.oslogin_v1.types.SshPublicKey]): Required. The SSH public key and expiration time. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.oslogin_v1.types.SshPublicKey` diff --git a/oslogin/google/cloud/oslogin_v1/gapic/os_login_service_client_config.py b/oslogin/google/cloud/oslogin_v1/gapic/os_login_service_client_config.py index e55e7963358b..6d64718728d7 100644 --- a/oslogin/google/cloud/oslogin_v1/gapic/os_login_service_client_config.py +++ b/oslogin/google/cloud/oslogin_v1/gapic/os_login_service_client_config.py @@ -10,41 +10,41 @@ "initial_retry_delay_millis": 100, "retry_delay_multiplier": 1.3, "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 10000, + "initial_rpc_timeout_millis": 20000, "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 10000, + "max_rpc_timeout_millis": 20000, "total_timeout_millis": 600000, } }, "methods": { "DeletePosixAccount": { - "timeout_millis": 10000, - "retry_codes_name": "idempotent", + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, "DeleteSshPublicKey": { - "timeout_millis": 10000, - "retry_codes_name": "idempotent", + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, "GetLoginProfile": { - "timeout_millis": 10000, + "timeout_millis": 60000, "retry_codes_name": "idempotent", "retry_params_name": "default", }, "GetSshPublicKey": { - "timeout_millis": 10000, + "timeout_millis": 60000, "retry_codes_name": "idempotent", "retry_params_name": "default", }, "ImportSshPublicKey": { - "timeout_millis": 10000, - "retry_codes_name": "idempotent", + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, "UpdateSshPublicKey": { - 
"timeout_millis": 10000, - "retry_codes_name": "idempotent", + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, }, diff --git a/oslogin/google/cloud/oslogin_v1/gapic/transports/os_login_service_grpc_transport.py b/oslogin/google/cloud/oslogin_v1/gapic/transports/os_login_service_grpc_transport.py index 6414cf27a0da..2860dfe42952 100644 --- a/oslogin/google/cloud/oslogin_v1/gapic/transports/os_login_service_grpc_transport.py +++ b/oslogin/google/cloud/oslogin_v1/gapic/transports/os_login_service_grpc_transport.py @@ -33,9 +33,7 @@ class OsLoginServiceGrpcTransport(object): # in this service. _OAUTH_SCOPES = ( "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/cloud-platform.read-only", "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/compute.readonly", ) def __init__( diff --git a/oslogin/google/cloud/oslogin_v1/proto/common_pb2.py b/oslogin/google/cloud/oslogin_v1/proto/common_pb2.py index 7a81f9e4a322..c842ef79bc1d 100644 --- a/oslogin/google/cloud/oslogin_v1/proto/common_pb2.py +++ b/oslogin/google/cloud/oslogin_v1/proto/common_pb2.py @@ -1,32 +1,72 @@ +# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! 
# source: google/cloud/oslogin/common/common.proto import sys _b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +from google.protobuf.internal import enum_type_wrapper from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database -from google.protobuf import descriptor_pb2 # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 DESCRIPTOR = _descriptor.FileDescriptor( name="google/cloud/oslogin/common/common.proto", package="google.cloud.oslogin.common", syntax="proto3", + serialized_options=_b( + "\n\037com.google.cloud.oslogin.commonB\014OsLoginProtoZAgoogle.golang.org/genproto/googleapis/cloud/oslogin/common;common\252\002\033Google.Cloud.OsLogin.Common\312\002\033Google\\Cloud\\OsLogin\\Common\352A+\n\033oslogin.googleapis.com/User\022\014users/{user}" + ), serialized_pb=_b( - '\n(google/cloud/oslogin/common/common.proto\x12\x1bgoogle.cloud.oslogin.common\x1a\x1cgoogle/api/annotations.proto"\xa8\x01\n\x0cPosixAccount\x12\x0f\n\x07primary\x18\x01 \x01(\x08\x12\x10\n\x08username\x18\x02 \x01(\t\x12\x0b\n\x03uid\x18\x03 \x01(\x03\x12\x0b\n\x03gid\x18\x04 \x01(\x03\x12\x16\n\x0ehome_directory\x18\x05 \x01(\t\x12\r\n\x05shell\x18\x06 \x01(\t\x12\r\n\x05gecos\x18\x07 \x01(\t\x12\x11\n\tsystem_id\x18\x08 \x01(\t\x12\x12\n\naccount_id\x18\t \x01(\t"N\n\x0cSshPublicKey\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x1c\n\x14\x65xpiration_time_usec\x18\x02 \x01(\x03\x12\x13\n\x0b\x66ingerprint\x18\x03 
\x01(\tB\xae\x01\n\x1f\x63om.google.cloud.oslogin.commonB\x0cOsLoginProtoZAgoogle.golang.org/genproto/googleapis/cloud/oslogin/common;common\xaa\x02\x1bGoogle.Cloud.OsLogin.Common\xca\x02\x1bGoogle\\Cloud\\OsLogin\\Commonb\x06proto3' + '\n(google/cloud/oslogin/common/common.proto\x12\x1bgoogle.cloud.oslogin.common\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto"\xdc\x02\n\x0cPosixAccount\x12\x0f\n\x07primary\x18\x01 \x01(\x08\x12\x10\n\x08username\x18\x02 \x01(\t\x12\x0b\n\x03uid\x18\x03 \x01(\x03\x12\x0b\n\x03gid\x18\x04 \x01(\x03\x12\x16\n\x0ehome_directory\x18\x05 \x01(\t\x12\r\n\x05shell\x18\x06 \x01(\t\x12\r\n\x05gecos\x18\x07 \x01(\t\x12\x11\n\tsystem_id\x18\x08 \x01(\t\x12\x17\n\naccount_id\x18\t \x01(\tB\x03\xe0\x41\x03\x12O\n\x15operating_system_type\x18\n \x01(\x0e\x32\x30.google.cloud.oslogin.common.OperatingSystemType\x12\x11\n\x04name\x18\x0b \x01(\tB\x03\xe0\x41\x03:I\xea\x41\x46\n#oslogin.googleapis.com/PosixAccount\x12\x1fusers/{user}/projects/{project}"\xba\x01\n\x0cSshPublicKey\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x1c\n\x14\x65xpiration_time_usec\x18\x02 \x01(\x03\x12\x18\n\x0b\x66ingerprint\x18\x03 \x01(\tB\x03\xe0\x41\x03\x12\x11\n\x04name\x18\x04 \x01(\tB\x03\xe0\x41\x03:R\xea\x41O\n#oslogin.googleapis.com/SshPublicKey\x12(users/{user}/sshPublicKeys/{fingerprint}*T\n\x13OperatingSystemType\x12%\n!OPERATING_SYSTEM_TYPE_UNSPECIFIED\x10\x00\x12\t\n\x05LINUX\x10\x01\x12\x0b\n\x07WINDOWS\x10\x02\x42\xdc\x01\n\x1f\x63om.google.cloud.oslogin.commonB\x0cOsLoginProtoZAgoogle.golang.org/genproto/googleapis/cloud/oslogin/common;common\xaa\x02\x1bGoogle.Cloud.OsLogin.Common\xca\x02\x1bGoogle\\Cloud\\OsLogin\\Common\xea\x41+\n\x1boslogin.googleapis.com/User\x12\x0cusers/{user}b\x06proto3' ), - dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR], + dependencies=[ + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, + google_dot_api_dot_resource__pb2.DESCRIPTOR, + ], +) + +_OPERATINGSYSTEMTYPE = _descriptor.EnumDescriptor( + 
name="OperatingSystemType", + full_name="google.cloud.oslogin.common.OperatingSystemType", + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name="OPERATING_SYSTEM_TYPE_UNSPECIFIED", + index=0, + number=0, + serialized_options=None, + type=None, + ), + _descriptor.EnumValueDescriptor( + name="LINUX", index=1, number=1, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="WINDOWS", index=2, number=2, serialized_options=None, type=None + ), + ], + containing_type=None, + serialized_options=None, + serialized_start=673, + serialized_end=757, ) +_sym_db.RegisterEnumDescriptor(_OPERATINGSYSTEMTYPE) + +OperatingSystemType = enum_type_wrapper.EnumTypeWrapper(_OPERATINGSYSTEMTYPE) +OPERATING_SYSTEM_TYPE_UNSPECIFIED = 0 +LINUX = 1 +WINDOWS = 2 _POSIXACCOUNT = _descriptor.Descriptor( @@ -51,7 +91,8 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, + file=DESCRIPTOR, ), _descriptor.FieldDescriptor( name="username", @@ -68,7 +109,8 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, + file=DESCRIPTOR, ), _descriptor.FieldDescriptor( name="uid", @@ -85,7 +127,8 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, + file=DESCRIPTOR, ), _descriptor.FieldDescriptor( name="gid", @@ -102,7 +145,8 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, + file=DESCRIPTOR, ), _descriptor.FieldDescriptor( name="home_directory", @@ -119,7 +163,8 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, + file=DESCRIPTOR, ), _descriptor.FieldDescriptor( name="shell", @@ -136,7 +181,8 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, + file=DESCRIPTOR, ), _descriptor.FieldDescriptor( 
name="gecos", @@ -153,7 +199,8 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, + file=DESCRIPTOR, ), _descriptor.FieldDescriptor( name="system_id", @@ -170,7 +217,8 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, + file=DESCRIPTOR, ), _descriptor.FieldDescriptor( name="account_id", @@ -187,19 +235,58 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=_b("\340A\003"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="operating_system_type", + full_name="google.cloud.oslogin.common.PosixAccount.operating_system_type", + index=9, + number=10, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="name", + full_name="google.cloud.oslogin.common.PosixAccount.name", + index=10, + number=11, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\003"), + file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=_b( + "\352AF\n#oslogin.googleapis.com/PosixAccount\022\037users/{user}/projects/{project}" + ), is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=104, - serialized_end=272, + serialized_start=134, + serialized_end=482, ) @@ -225,7 +312,8 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, + file=DESCRIPTOR, ), _descriptor.FieldDescriptor( name="expiration_time_usec", @@ -242,7 +330,8 @@ containing_type=None, is_extension=False, 
extension_scope=None, - options=None, + serialized_options=None, + file=DESCRIPTOR, ), _descriptor.FieldDescriptor( name="fingerprint", @@ -259,23 +348,46 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=_b("\340A\003"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="name", + full_name="google.cloud.oslogin.common.SshPublicKey.name", + index=3, + number=4, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\003"), + file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=_b( + "\352AO\n#oslogin.googleapis.com/SshPublicKey\022(users/{user}/sshPublicKeys/{fingerprint}" + ), is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=274, - serialized_end=352, + serialized_start=485, + serialized_end=671, ) +_POSIXACCOUNT.fields_by_name["operating_system_type"].enum_type = _OPERATINGSYSTEMTYPE DESCRIPTOR.message_types_by_name["PosixAccount"] = _POSIXACCOUNT DESCRIPTOR.message_types_by_name["SshPublicKey"] = _SSHPUBLICKEY +DESCRIPTOR.enum_types_by_name["OperatingSystemType"] = _OPERATINGSYSTEMTYPE _sym_db.RegisterFileDescriptor(DESCRIPTOR) PosixAccount = _reflection.GeneratedProtocolMessageType( @@ -307,6 +419,10 @@ applies to. By default, the empty value is used. account_id: Output only. A POSIX account identifier. + operating_system_type: + The operating system type where this account applies. + name: + Output only. The canonical resource name. """, # @@protoc_insertion_point(class_scope:google.cloud.oslogin.common.PosixAccount) ), @@ -329,6 +445,8 @@ An expiration time in microseconds since epoch. fingerprint: Output only. The SHA-256 fingerprint of the SSH public key. + name: + Output only. The canonical resource name. 
""", # @@protoc_insertion_point(class_scope:google.cloud.oslogin.common.SshPublicKey) ), @@ -336,11 +454,11 @@ _sym_db.RegisterMessage(SshPublicKey) -DESCRIPTOR.has_options = True -DESCRIPTOR._options = _descriptor._ParseOptions( - descriptor_pb2.FileOptions(), - _b( - "\n\037com.google.cloud.oslogin.commonB\014OsLoginProtoZAgoogle.golang.org/genproto/googleapis/cloud/oslogin/common;common\252\002\033Google.Cloud.OsLogin.Common\312\002\033Google\\Cloud\\OsLogin\\Common" - ), -) +DESCRIPTOR._options = None +_POSIXACCOUNT.fields_by_name["account_id"]._options = None +_POSIXACCOUNT.fields_by_name["name"]._options = None +_POSIXACCOUNT._options = None +_SSHPUBLICKEY.fields_by_name["fingerprint"]._options = None +_SSHPUBLICKEY.fields_by_name["name"]._options = None +_SSHPUBLICKEY._options = None # @@protoc_insertion_point(module_scope) diff --git a/oslogin/google/cloud/oslogin_v1/proto/oslogin.proto b/oslogin/google/cloud/oslogin_v1/proto/oslogin.proto index d76bd47e650c..75d7c060518b 100644 --- a/oslogin/google/cloud/oslogin_v1/proto/oslogin.proto +++ b/oslogin/google/cloud/oslogin_v1/proto/oslogin.proto @@ -1,4 +1,4 @@ -// Copyright 2017 Google Inc. +// Copyright 2019 Google LLC. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -11,12 +11,16 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
+// syntax = "proto3"; package google.cloud.oslogin.v1; import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; import "google/cloud/oslogin/common/common.proto"; import "google/protobuf/empty.proto"; import "google/protobuf/field_mask.proto"; @@ -33,20 +37,25 @@ option php_namespace = "Google\\Cloud\\OsLogin\\V1"; // The Cloud OS Login API allows you to manage users and their associated SSH // public keys for logging into virtual machines on Google Cloud Platform. service OsLoginService { + option (google.api.default_host) = "oslogin.googleapis.com"; + option (google.api.oauth_scopes) = + "https://www.googleapis.com/auth/cloud-platform," + "https://www.googleapis.com/auth/compute"; + // Deletes a POSIX account. - rpc DeletePosixAccount(DeletePosixAccountRequest) - returns (google.protobuf.Empty) { + rpc DeletePosixAccount(DeletePosixAccountRequest) returns (google.protobuf.Empty) { option (google.api.http) = { delete: "/v1/{name=users/*/projects/*}" }; + option (google.api.method_signature) = "name"; } // Deletes an SSH public key. - rpc DeleteSshPublicKey(DeleteSshPublicKeyRequest) - returns (google.protobuf.Empty) { + rpc DeleteSshPublicKey(DeleteSshPublicKeyRequest) returns (google.protobuf.Empty) { option (google.api.http) = { delete: "/v1/{name=users/*/sshPublicKeys/*}" }; + option (google.api.method_signature) = "name"; } // Retrieves the profile information used for logging in to a virtual machine @@ -55,92 +64,122 @@ service OsLoginService { option (google.api.http) = { get: "/v1/{name=users/*}/loginProfile" }; + option (google.api.method_signature) = "name"; } // Retrieves an SSH public key. 
- rpc GetSshPublicKey(GetSshPublicKeyRequest) - returns (google.cloud.oslogin.common.SshPublicKey) { + rpc GetSshPublicKey(GetSshPublicKeyRequest) returns (google.cloud.oslogin.common.SshPublicKey) { option (google.api.http) = { get: "/v1/{name=users/*/sshPublicKeys/*}" }; + option (google.api.method_signature) = "name"; } // Adds an SSH public key and returns the profile information. Default POSIX // account information is set when no username and UID exist as part of the // login profile. - rpc ImportSshPublicKey(ImportSshPublicKeyRequest) - returns (ImportSshPublicKeyResponse) { + rpc ImportSshPublicKey(ImportSshPublicKeyRequest) returns (ImportSshPublicKeyResponse) { option (google.api.http) = { post: "/v1/{parent=users/*}:importSshPublicKey" body: "ssh_public_key" }; + option (google.api.method_signature) = "parent,ssh_public_key"; + option (google.api.method_signature) = "parent,ssh_public_key,project_id"; } // Updates an SSH public key and returns the profile information. This method // supports patch semantics. - rpc UpdateSshPublicKey(UpdateSshPublicKeyRequest) - returns (google.cloud.oslogin.common.SshPublicKey) { + rpc UpdateSshPublicKey(UpdateSshPublicKeyRequest) returns (google.cloud.oslogin.common.SshPublicKey) { option (google.api.http) = { patch: "/v1/{name=users/*/sshPublicKeys/*}" body: "ssh_public_key" }; + option (google.api.method_signature) = "name,ssh_public_key"; + option (google.api.method_signature) = "name,ssh_public_key,update_mask"; } } // The user profile information used for logging in to a virtual machine on // Google Compute Engine. message LoginProfile { - // The primary email address that uniquely identifies the user. - string name = 1; + // Required. A unique user ID. + string name = 1 [(google.api.field_behavior) = REQUIRED]; // The list of POSIX accounts associated with the user. repeated google.cloud.oslogin.common.PosixAccount posix_accounts = 2; // A map from SSH public key fingerprint to the associated key object. 
map ssh_public_keys = 3; - - // Indicates if the user is suspended. A suspended user cannot log in but - // their profile information is retained. - bool suspended = 4; } // A request message for deleting a POSIX account entry. message DeletePosixAccountRequest { - // A reference to the POSIX account to update. POSIX accounts are identified + // Required. A reference to the POSIX account to update. POSIX accounts are identified // by the project ID they are associated with. A reference to the POSIX // account is in format `users/{user}/projects/{project}`. - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "oslogin.googleapis.com/PosixAccount" + } + ]; } // A request message for deleting an SSH public key. message DeleteSshPublicKeyRequest { - // The fingerprint of the public key to update. Public keys are identified by + // Required. The fingerprint of the public key to update. Public keys are identified by // their SHA-256 fingerprint. The fingerprint of the public key is in format // `users/{user}/sshPublicKeys/{fingerprint}`. - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "oslogin.googleapis.com/SshPublicKey" + } + ]; } // A request message for retrieving the login profile information for a user. message GetLoginProfileRequest { - // The unique ID for the user in format `users/{user}`. - string name = 1; + // Required. The unique ID for the user in format `users/{user}`. + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "oslogin.googleapis.com/PosixAccount" + } + ]; + + // The project ID of the Google Cloud Platform project. + string project_id = 2; + + // A system ID for filtering the results of the request. + string system_id = 3; } // A request message for retrieving an SSH public key. 
message GetSshPublicKeyRequest { - // The fingerprint of the public key to retrieve. Public keys are identified + // Required. The fingerprint of the public key to retrieve. Public keys are identified // by their SHA-256 fingerprint. The fingerprint of the public key is in // format `users/{user}/sshPublicKeys/{fingerprint}`. - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "oslogin.googleapis.com/SshPublicKey" + } + ]; } // A request message for importing an SSH public key. message ImportSshPublicKeyRequest { - // The unique ID for the user in format `users/{user}`. - string parent = 1; + // Required. The unique ID for the user in format `users/{user}`. + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "oslogin.googleapis.com/SshPublicKey" + } + ]; - // The SSH public key and expiration time. - google.cloud.oslogin.common.SshPublicKey ssh_public_key = 2; + // Optional. The SSH public key and expiration time. + google.cloud.oslogin.common.SshPublicKey ssh_public_key = 2 [(google.api.field_behavior) = OPTIONAL]; // The project ID of the Google Cloud Platform project. string project_id = 3; @@ -154,13 +193,18 @@ message ImportSshPublicKeyResponse { // A request message for updating an SSH public key. message UpdateSshPublicKeyRequest { - // The fingerprint of the public key to update. Public keys are identified by + // Required. The fingerprint of the public key to update. Public keys are identified by // their SHA-256 fingerprint. The fingerprint of the public key is in format // `users/{user}/sshPublicKeys/{fingerprint}`. - string name = 1; - - // The SSH public key and expiration time. - google.cloud.oslogin.common.SshPublicKey ssh_public_key = 2; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "oslogin.googleapis.com/SshPublicKey" + } + ]; + + // Required. 
The SSH public key and expiration time. + google.cloud.oslogin.common.SshPublicKey ssh_public_key = 2 [(google.api.field_behavior) = REQUIRED]; // Mask to control which fields get updated. Updates all if not present. google.protobuf.FieldMask update_mask = 3; diff --git a/oslogin/google/cloud/oslogin_v1/proto/oslogin/common/common_pb2.py b/oslogin/google/cloud/oslogin_v1/proto/oslogin/common/common_pb2.py deleted file mode 100644 index 93acf477103a..000000000000 --- a/oslogin/google/cloud/oslogin_v1/proto/oslogin/common/common_pb2.py +++ /dev/null @@ -1,355 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/cloud/oslogin/common/common.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/oslogin/common/common.proto", - package="google.cloud.oslogin.common", - syntax="proto3", - serialized_options=_b( - "\n\037com.google.cloud.oslogin.commonB\014OsLoginProtoZAgoogle.golang.org/genproto/googleapis/cloud/oslogin/common;common\252\002\033Google.Cloud.OsLogin.Common\312\002\033Google\\Cloud\\OsLogin\\Common" - ), - serialized_pb=_b( - '\n(google/cloud/oslogin/common/common.proto\x12\x1bgoogle.cloud.oslogin.common\x1a\x1cgoogle/api/annotations.proto"\xa8\x01\n\x0cPosixAccount\x12\x0f\n\x07primary\x18\x01 \x01(\x08\x12\x10\n\x08username\x18\x02 \x01(\t\x12\x0b\n\x03uid\x18\x03 \x01(\x03\x12\x0b\n\x03gid\x18\x04 \x01(\x03\x12\x16\n\x0ehome_directory\x18\x05 \x01(\t\x12\r\n\x05shell\x18\x06 \x01(\t\x12\r\n\x05gecos\x18\x07 
\x01(\t\x12\x11\n\tsystem_id\x18\x08 \x01(\t\x12\x12\n\naccount_id\x18\t \x01(\t"N\n\x0cSshPublicKey\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x1c\n\x14\x65xpiration_time_usec\x18\x02 \x01(\x03\x12\x13\n\x0b\x66ingerprint\x18\x03 \x01(\tB\xae\x01\n\x1f\x63om.google.cloud.oslogin.commonB\x0cOsLoginProtoZAgoogle.golang.org/genproto/googleapis/cloud/oslogin/common;common\xaa\x02\x1bGoogle.Cloud.OsLogin.Common\xca\x02\x1bGoogle\\Cloud\\OsLogin\\Commonb\x06proto3' - ), - dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR], -) - - -_POSIXACCOUNT = _descriptor.Descriptor( - name="PosixAccount", - full_name="google.cloud.oslogin.common.PosixAccount", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="primary", - full_name="google.cloud.oslogin.common.PosixAccount.primary", - index=0, - number=1, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="username", - full_name="google.cloud.oslogin.common.PosixAccount.username", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="uid", - full_name="google.cloud.oslogin.common.PosixAccount.uid", - index=2, - number=3, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="gid", - full_name="google.cloud.oslogin.common.PosixAccount.gid", - index=3, - 
number=4, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="home_directory", - full_name="google.cloud.oslogin.common.PosixAccount.home_directory", - index=4, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="shell", - full_name="google.cloud.oslogin.common.PosixAccount.shell", - index=5, - number=6, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="gecos", - full_name="google.cloud.oslogin.common.PosixAccount.gecos", - index=6, - number=7, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="system_id", - full_name="google.cloud.oslogin.common.PosixAccount.system_id", - index=7, - number=8, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="account_id", - full_name="google.cloud.oslogin.common.PosixAccount.account_id", - index=8, - number=9, - 
type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=104, - serialized_end=272, -) - - -_SSHPUBLICKEY = _descriptor.Descriptor( - name="SshPublicKey", - full_name="google.cloud.oslogin.common.SshPublicKey", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.cloud.oslogin.common.SshPublicKey.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="expiration_time_usec", - full_name="google.cloud.oslogin.common.SshPublicKey.expiration_time_usec", - index=1, - number=2, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="fingerprint", - full_name="google.cloud.oslogin.common.SshPublicKey.fingerprint", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - 
extension_ranges=[], - oneofs=[], - serialized_start=274, - serialized_end=352, -) - -DESCRIPTOR.message_types_by_name["PosixAccount"] = _POSIXACCOUNT -DESCRIPTOR.message_types_by_name["SshPublicKey"] = _SSHPUBLICKEY -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -PosixAccount = _reflection.GeneratedProtocolMessageType( - "PosixAccount", - (_message.Message,), - dict( - DESCRIPTOR=_POSIXACCOUNT, - __module__="google.cloud.oslogin.common.common_pb2", - __doc__="""The POSIX account information associated with a Google account. - - - Attributes: - primary: - Only one POSIX account can be marked as primary. - username: - The username of the POSIX account. - uid: - The user ID. - gid: - The default group ID. - home_directory: - The path to the home directory for this account. - shell: - The path to the logic shell for this account. - gecos: - The GECOS (user information) entry for this account. - system_id: - System identifier for which account the username or uid - applies to. By default, the empty value is used. - account_id: - Output only. A POSIX account identifier. - """, - # @@protoc_insertion_point(class_scope:google.cloud.oslogin.common.PosixAccount) - ), -) -_sym_db.RegisterMessage(PosixAccount) - -SshPublicKey = _reflection.GeneratedProtocolMessageType( - "SshPublicKey", - (_message.Message,), - dict( - DESCRIPTOR=_SSHPUBLICKEY, - __module__="google.cloud.oslogin.common.common_pb2", - __doc__="""The SSH public key information associated with a Google account. - - - Attributes: - key: - Public key text in SSH format, defined by RFC4253 section 6.6. - expiration_time_usec: - An expiration time in microseconds since epoch. - fingerprint: - Output only. The SHA-256 fingerprint of the SSH public key. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.oslogin.common.SshPublicKey) - ), -) -_sym_db.RegisterMessage(SshPublicKey) - - -DESCRIPTOR._options = None -# @@protoc_insertion_point(module_scope) diff --git a/oslogin/google/cloud/oslogin_v1/proto/oslogin_pb2.py b/oslogin/google/cloud/oslogin_v1/proto/oslogin_pb2.py index 3eb27bda6a64..0feaf4be9ae9 100644 --- a/oslogin/google/cloud/oslogin_v1/proto/oslogin_pb2.py +++ b/oslogin/google/cloud/oslogin_v1/proto/oslogin_pb2.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: google/cloud/oslogin_v1/proto/oslogin.proto @@ -15,6 +16,9 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.cloud.oslogin_v1.proto import ( common_pb2 as google_dot_cloud_dot_oslogin_dot_common_dot_common__pb2, ) @@ -30,10 +34,13 @@ "\n\033com.google.cloud.oslogin.v1B\014OsLoginProtoP\001Z>google.golang.org/genproto/googleapis/cloud/oslogin/v1;oslogin\252\002\027Google.Cloud.OsLogin.V1\312\002\027Google\\Cloud\\OsLogin\\V1" ), serialized_pb=_b( - '\n+google/cloud/oslogin_v1/proto/oslogin.proto\x12\x17google.cloud.oslogin.v1\x1a\x1cgoogle/api/annotations.proto\x1a(google/cloud/oslogin/common/common.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto"\xa6\x02\n\x0cLoginProfile\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x41\n\x0eposix_accounts\x18\x02 \x03(\x0b\x32).google.cloud.oslogin.common.PosixAccount\x12Q\n\x0fssh_public_keys\x18\x03 \x03(\x0b\x32\x38.google.cloud.oslogin.v1.LoginProfile.SshPublicKeysEntry\x12\x11\n\tsuspended\x18\x04 \x01(\x08\x1a_\n\x12SshPublicKeysEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x38\n\x05value\x18\x02 
\x01(\x0b\x32).google.cloud.oslogin.common.SshPublicKey:\x02\x38\x01")\n\x19\x44\x65letePosixAccountRequest\x12\x0c\n\x04name\x18\x01 \x01(\t")\n\x19\x44\x65leteSshPublicKeyRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"&\n\x16GetLoginProfileRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"&\n\x16GetSshPublicKeyRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"\x82\x01\n\x19ImportSshPublicKeyRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x41\n\x0essh_public_key\x18\x02 \x01(\x0b\x32).google.cloud.oslogin.common.SshPublicKey\x12\x12\n\nproject_id\x18\x03 \x01(\t"Z\n\x1aImportSshPublicKeyResponse\x12<\n\rlogin_profile\x18\x01 \x01(\x0b\x32%.google.cloud.oslogin.v1.LoginProfile"\x9d\x01\n\x19UpdateSshPublicKeyRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x41\n\x0essh_public_key\x18\x02 \x01(\x0b\x32).google.cloud.oslogin.common.SshPublicKey\x12/\n\x0bupdate_mask\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.FieldMask2\xcd\x07\n\x0eOsLoginService\x12\x87\x01\n\x12\x44\x65letePosixAccount\x12\x32.google.cloud.oslogin.v1.DeletePosixAccountRequest\x1a\x16.google.protobuf.Empty"%\x82\xd3\xe4\x93\x02\x1f*\x1d/v1/{name=users/*/projects/*}\x12\x8c\x01\n\x12\x44\x65leteSshPublicKey\x12\x32.google.cloud.oslogin.v1.DeleteSshPublicKeyRequest\x1a\x16.google.protobuf.Empty"*\x82\xd3\xe4\x93\x02$*"/v1/{name=users/*/sshPublicKeys/*}\x12\x92\x01\n\x0fGetLoginProfile\x12/.google.cloud.oslogin.v1.GetLoginProfileRequest\x1a%.google.cloud.oslogin.v1.LoginProfile"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{name=users/*}/loginProfile\x12\x99\x01\n\x0fGetSshPublicKey\x12/.google.cloud.oslogin.v1.GetSshPublicKeyRequest\x1a).google.cloud.oslogin.common.SshPublicKey"*\x82\xd3\xe4\x93\x02$\x12"/v1/{name=users/*/sshPublicKeys/*}\x12\xbe\x01\n\x12ImportSshPublicKey\x12\x32.google.cloud.oslogin.v1.ImportSshPublicKeyRequest\x1a\x33.google.cloud.oslogin.v1.ImportSshPublicKeyResponse"?\x82\xd3\xe4\x93\x02\x39"\'/v1/{parent=users/*}:importSshPublicKey:\x0essh_public_key\x12\xaf\x01\n\x12UpdateSshPublicKey\x12\x32.google.cloud.oslogin
.v1.UpdateSshPublicKeyRequest\x1a).google.cloud.oslogin.common.SshPublicKey":\x82\xd3\xe4\x93\x02\x34\x32"/v1/{name=users/*/sshPublicKeys/*}:\x0essh_public_keyB\xa1\x01\n\x1b\x63om.google.cloud.oslogin.v1B\x0cOsLoginProtoP\x01Z>google.golang.org/genproto/googleapis/cloud/oslogin/v1;oslogin\xaa\x02\x17Google.Cloud.OsLogin.V1\xca\x02\x17Google\\Cloud\\OsLogin\\V1b\x06proto3' + '\n+google/cloud/oslogin_v1/proto/oslogin.proto\x12\x17google.cloud.oslogin.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a(google/cloud/oslogin/common/common.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto"\x98\x02\n\x0cLoginProfile\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x41\n\x0eposix_accounts\x18\x02 \x03(\x0b\x32).google.cloud.oslogin.common.PosixAccount\x12Q\n\x0fssh_public_keys\x18\x03 \x03(\x0b\x32\x38.google.cloud.oslogin.v1.LoginProfile.SshPublicKeysEntry\x1a_\n\x12SshPublicKeysEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x38\n\x05value\x18\x02 \x01(\x0b\x32).google.cloud.oslogin.common.SshPublicKey:\x02\x38\x01"V\n\x19\x44\x65letePosixAccountRequest\x12\x39\n\x04name\x18\x01 \x01(\tB+\xe0\x41\x02\xfa\x41%\n#oslogin.googleapis.com/PosixAccount"V\n\x19\x44\x65leteSshPublicKeyRequest\x12\x39\n\x04name\x18\x01 \x01(\tB+\xe0\x41\x02\xfa\x41%\n#oslogin.googleapis.com/SshPublicKey"z\n\x16GetLoginProfileRequest\x12\x39\n\x04name\x18\x01 \x01(\tB+\xe0\x41\x02\xfa\x41%\x12#oslogin.googleapis.com/PosixAccount\x12\x12\n\nproject_id\x18\x02 \x01(\t\x12\x11\n\tsystem_id\x18\x03 \x01(\t"S\n\x16GetSshPublicKeyRequest\x12\x39\n\x04name\x18\x01 \x01(\tB+\xe0\x41\x02\xfa\x41%\n#oslogin.googleapis.com/SshPublicKey"\xb4\x01\n\x19ImportSshPublicKeyRequest\x12;\n\x06parent\x18\x01 \x01(\tB+\xe0\x41\x02\xfa\x41%\x12#oslogin.googleapis.com/SshPublicKey\x12\x46\n\x0essh_public_key\x18\x02 
\x01(\x0b\x32).google.cloud.oslogin.common.SshPublicKeyB\x03\xe0\x41\x01\x12\x12\n\nproject_id\x18\x03 \x01(\t"Z\n\x1aImportSshPublicKeyResponse\x12<\n\rlogin_profile\x18\x01 \x01(\x0b\x32%.google.cloud.oslogin.v1.LoginProfile"\xcf\x01\n\x19UpdateSshPublicKeyRequest\x12\x39\n\x04name\x18\x01 \x01(\tB+\xe0\x41\x02\xfa\x41%\n#oslogin.googleapis.com/SshPublicKey\x12\x46\n\x0essh_public_key\x18\x02 \x01(\x0b\x32).google.cloud.oslogin.common.SshPublicKeyB\x03\xe0\x41\x02\x12/\n\x0bupdate_mask\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.FieldMask2\xd0\t\n\x0eOsLoginService\x12\x8e\x01\n\x12\x44\x65letePosixAccount\x12\x32.google.cloud.oslogin.v1.DeletePosixAccountRequest\x1a\x16.google.protobuf.Empty",\x82\xd3\xe4\x93\x02\x1f*\x1d/v1/{name=users/*/projects/*}\xda\x41\x04name\x12\x93\x01\n\x12\x44\x65leteSshPublicKey\x12\x32.google.cloud.oslogin.v1.DeleteSshPublicKeyRequest\x1a\x16.google.protobuf.Empty"1\x82\xd3\xe4\x93\x02$*"/v1/{name=users/*/sshPublicKeys/*}\xda\x41\x04name\x12\x99\x01\n\x0fGetLoginProfile\x12/.google.cloud.oslogin.v1.GetLoginProfileRequest\x1a%.google.cloud.oslogin.v1.LoginProfile".\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{name=users/*}/loginProfile\xda\x41\x04name\x12\xa0\x01\n\x0fGetSshPublicKey\x12/.google.cloud.oslogin.v1.GetSshPublicKeyRequest\x1a).google.cloud.oslogin.common.SshPublicKey"1\x82\xd3\xe4\x93\x02$\x12"/v1/{name=users/*/sshPublicKeys/*}\xda\x41\x04name\x12\xf9\x01\n\x12ImportSshPublicKey\x12\x32.google.cloud.oslogin.v1.ImportSshPublicKeyRequest\x1a\x33.google.cloud.oslogin.v1.ImportSshPublicKeyResponse"z\x82\xd3\xe4\x93\x02\x39"\'/v1/{parent=users/*}:importSshPublicKey:\x0essh_public_key\xda\x41\x15parent,ssh_public_key\xda\x41 
parent,ssh_public_key,project_id\x12\xe7\x01\n\x12UpdateSshPublicKey\x12\x32.google.cloud.oslogin.v1.UpdateSshPublicKeyRequest\x1a).google.cloud.oslogin.common.SshPublicKey"r\x82\xd3\xe4\x93\x02\x34\x32"/v1/{name=users/*/sshPublicKeys/*}:\x0essh_public_key\xda\x41\x13name,ssh_public_key\xda\x41\x1fname,ssh_public_key,update_mask\x1ar\xca\x41\x16oslogin.googleapis.com\xd2\x41Vhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/computeB\xa1\x01\n\x1b\x63om.google.cloud.oslogin.v1B\x0cOsLoginProtoP\x01Z>google.golang.org/genproto/googleapis/cloud/oslogin/v1;oslogin\xaa\x02\x17Google.Cloud.OsLogin.V1\xca\x02\x17Google\\Cloud\\OsLogin\\V1b\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, + google_dot_api_dot_resource__pb2.DESCRIPTOR, google_dot_cloud_dot_oslogin_dot_common_dot_common__pb2.DESCRIPTOR, google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, @@ -93,8 +100,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=407, - serialized_end=502, + serialized_start=478, + serialized_end=573, ) _LOGINPROFILE = _descriptor.Descriptor( @@ -119,7 +126,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -158,24 +165,6 @@ serialized_options=None, file=DESCRIPTOR, ), - _descriptor.FieldDescriptor( - name="suspended", - full_name="google.cloud.oslogin.v1.LoginProfile.suspended", - index=3, - number=4, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), ], extensions=[], nested_types=[_LOGINPROFILE_SSHPUBLICKEYSENTRY], @@ -185,8 +174,8 @@ 
syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=208, - serialized_end=502, + serialized_start=293, + serialized_end=573, ) @@ -212,7 +201,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A%\n#oslogin.googleapis.com/PosixAccount" + ), file=DESCRIPTOR, ) ], @@ -224,8 +215,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=504, - serialized_end=545, + serialized_start=575, + serialized_end=661, ) @@ -251,7 +242,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A%\n#oslogin.googleapis.com/SshPublicKey" + ), file=DESCRIPTOR, ) ], @@ -263,8 +256,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=547, - serialized_end=588, + serialized_start=663, + serialized_end=749, ) @@ -290,9 +283,47 @@ containing_type=None, is_extension=False, extension_scope=None, + serialized_options=_b( + "\340A\002\372A%\022#oslogin.googleapis.com/PosixAccount" + ), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="project_id", + full_name="google.cloud.oslogin.v1.GetLoginProfileRequest.project_id", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), + _descriptor.FieldDescriptor( + name="system_id", + full_name="google.cloud.oslogin.v1.GetLoginProfileRequest.system_id", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), ], extensions=[], nested_types=[], @@ -302,8 +333,8 @@ syntax="proto3", 
extension_ranges=[], oneofs=[], - serialized_start=590, - serialized_end=628, + serialized_start=751, + serialized_end=873, ) @@ -329,7 +360,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A%\n#oslogin.googleapis.com/SshPublicKey" + ), file=DESCRIPTOR, ) ], @@ -341,8 +374,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=630, - serialized_end=668, + serialized_start=875, + serialized_end=958, ) @@ -368,7 +401,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A%\022#oslogin.googleapis.com/SshPublicKey" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -386,7 +421,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -416,8 +451,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=671, - serialized_end=801, + serialized_start=961, + serialized_end=1141, ) @@ -455,8 +490,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=803, - serialized_end=893, + serialized_start=1143, + serialized_end=1233, ) @@ -482,7 +517,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A%\n#oslogin.googleapis.com/SshPublicKey" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -500,7 +537,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -530,8 +567,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=896, - serialized_end=1053, + serialized_start=1236, + serialized_end=1443, ) _LOGINPROFILE_SSHPUBLICKEYSENTRY.fields_by_name[ @@ -595,15 +632,12 @@ Attributes: name: - The 
primary email address that uniquely identifies the user. + Required. A unique user ID. posix_accounts: The list of POSIX accounts associated with the user. ssh_public_keys: A map from SSH public key fingerprint to the associated key object. - suspended: - Indicates if the user is suspended. A suspended user cannot - log in but their profile information is retained. """, # @@protoc_insertion_point(class_scope:google.cloud.oslogin.v1.LoginProfile) ), @@ -622,9 +656,9 @@ Attributes: name: - A reference to the POSIX account to update. POSIX accounts are - identified by the project ID they are associated with. A - reference to the POSIX account is in format + Required. A reference to the POSIX account to update. POSIX + accounts are identified by the project ID they are associated + with. A reference to the POSIX account is in format ``users/{user}/projects/{project}``. """, # @@protoc_insertion_point(class_scope:google.cloud.oslogin.v1.DeletePosixAccountRequest) @@ -643,9 +677,9 @@ Attributes: name: - The fingerprint of the public key to update. Public keys are - identified by their SHA-256 fingerprint. The fingerprint of - the public key is in format + Required. The fingerprint of the public key to update. Public + keys are identified by their SHA-256 fingerprint. The + fingerprint of the public key is in format ``users/{user}/sshPublicKeys/{fingerprint}``. """, # @@protoc_insertion_point(class_scope:google.cloud.oslogin.v1.DeleteSshPublicKeyRequest) @@ -665,7 +699,12 @@ Attributes: name: - The unique ID for the user in format ``users/{user}``. + Required. The unique ID for the user in format + ``users/{user}``. + project_id: + The project ID of the Google Cloud Platform project. + system_id: + A system ID for filtering the results of the request. """, # @@protoc_insertion_point(class_scope:google.cloud.oslogin.v1.GetLoginProfileRequest) ), @@ -683,9 +722,9 @@ Attributes: name: - The fingerprint of the public key to retrieve. 
Public keys are - identified by their SHA-256 fingerprint. The fingerprint of - the public key is in format + Required. The fingerprint of the public key to retrieve. + Public keys are identified by their SHA-256 fingerprint. The + fingerprint of the public key is in format ``users/{user}/sshPublicKeys/{fingerprint}``. """, # @@protoc_insertion_point(class_scope:google.cloud.oslogin.v1.GetSshPublicKeyRequest) @@ -704,9 +743,10 @@ Attributes: parent: - The unique ID for the user in format ``users/{user}``. + Required. The unique ID for the user in format + ``users/{user}``. ssh_public_key: - The SSH public key and expiration time. + Optional. The SSH public key and expiration time. project_id: The project ID of the Google Cloud Platform project. """, @@ -744,12 +784,12 @@ Attributes: name: - The fingerprint of the public key to update. Public keys are - identified by their SHA-256 fingerprint. The fingerprint of - the public key is in format + Required. The fingerprint of the public key to update. Public + keys are identified by their SHA-256 fingerprint. The + fingerprint of the public key is in format ``users/{user}/sshPublicKeys/{fingerprint}``. ssh_public_key: - The SSH public key and expiration time. + Required. The SSH public key and expiration time. update_mask: Mask to control which fields get updated. Updates all if not present. 
@@ -762,15 +802,26 @@ DESCRIPTOR._options = None _LOGINPROFILE_SSHPUBLICKEYSENTRY._options = None +_LOGINPROFILE.fields_by_name["name"]._options = None +_DELETEPOSIXACCOUNTREQUEST.fields_by_name["name"]._options = None +_DELETESSHPUBLICKEYREQUEST.fields_by_name["name"]._options = None +_GETLOGINPROFILEREQUEST.fields_by_name["name"]._options = None +_GETSSHPUBLICKEYREQUEST.fields_by_name["name"]._options = None +_IMPORTSSHPUBLICKEYREQUEST.fields_by_name["parent"]._options = None +_IMPORTSSHPUBLICKEYREQUEST.fields_by_name["ssh_public_key"]._options = None +_UPDATESSHPUBLICKEYREQUEST.fields_by_name["name"]._options = None +_UPDATESSHPUBLICKEYREQUEST.fields_by_name["ssh_public_key"]._options = None _OSLOGINSERVICE = _descriptor.ServiceDescriptor( name="OsLoginService", full_name="google.cloud.oslogin.v1.OsLoginService", file=DESCRIPTOR, index=0, - serialized_options=None, - serialized_start=1056, - serialized_end=2029, + serialized_options=_b( + "\312A\026oslogin.googleapis.com\322AVhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/compute" + ), + serialized_start=1446, + serialized_end=2678, methods=[ _descriptor.MethodDescriptor( name="DeletePosixAccount", @@ -780,7 +831,7 @@ input_type=_DELETEPOSIXACCOUNTREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, serialized_options=_b( - "\202\323\344\223\002\037*\035/v1/{name=users/*/projects/*}" + "\202\323\344\223\002\037*\035/v1/{name=users/*/projects/*}\332A\004name" ), ), _descriptor.MethodDescriptor( @@ -791,7 +842,7 @@ input_type=_DELETESSHPUBLICKEYREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, serialized_options=_b( - '\202\323\344\223\002$*"/v1/{name=users/*/sshPublicKeys/*}' + '\202\323\344\223\002$*"/v1/{name=users/*/sshPublicKeys/*}\332A\004name' ), ), _descriptor.MethodDescriptor( @@ -802,7 +853,7 @@ input_type=_GETLOGINPROFILEREQUEST, output_type=_LOGINPROFILE, serialized_options=_b( - "\202\323\344\223\002!\022\037/v1/{name=users/*}/loginProfile" + 
"\202\323\344\223\002!\022\037/v1/{name=users/*}/loginProfile\332A\004name" ), ), _descriptor.MethodDescriptor( @@ -813,7 +864,7 @@ input_type=_GETSSHPUBLICKEYREQUEST, output_type=google_dot_cloud_dot_oslogin_dot_common_dot_common__pb2._SSHPUBLICKEY, serialized_options=_b( - '\202\323\344\223\002$\022"/v1/{name=users/*/sshPublicKeys/*}' + '\202\323\344\223\002$\022"/v1/{name=users/*/sshPublicKeys/*}\332A\004name' ), ), _descriptor.MethodDescriptor( @@ -824,7 +875,7 @@ input_type=_IMPORTSSHPUBLICKEYREQUEST, output_type=_IMPORTSSHPUBLICKEYRESPONSE, serialized_options=_b( - "\202\323\344\223\0029\"'/v1/{parent=users/*}:importSshPublicKey:\016ssh_public_key" + "\202\323\344\223\0029\"'/v1/{parent=users/*}:importSshPublicKey:\016ssh_public_key\332A\025parent,ssh_public_key\332A parent,ssh_public_key,project_id" ), ), _descriptor.MethodDescriptor( @@ -835,7 +886,7 @@ input_type=_UPDATESSHPUBLICKEYREQUEST, output_type=google_dot_cloud_dot_oslogin_dot_common_dot_common__pb2._SSHPUBLICKEY, serialized_options=_b( - '\202\323\344\223\00242"/v1/{name=users/*/sshPublicKeys/*}:\016ssh_public_key' + '\202\323\344\223\00242"/v1/{name=users/*/sshPublicKeys/*}:\016ssh_public_key\332A\023name,ssh_public_key\332A\037name,ssh_public_key,update_mask' ), ), ], diff --git a/oslogin/google/cloud/oslogin_v1/proto/oslogin_v1/proto/oslogin_pb2.py b/oslogin/google/cloud/oslogin_v1/proto/oslogin_v1/proto/oslogin_pb2.py deleted file mode 100644 index 11ba220ecfdc..000000000000 --- a/oslogin/google/cloud/oslogin_v1/proto/oslogin_v1/proto/oslogin_pb2.py +++ /dev/null @@ -1,848 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/cloud/oslogin_v1/proto/oslogin.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.cloud.oslogin_v1.proto import ( - common_pb2 as google_dot_cloud_dot_oslogin_dot_common_dot_common__pb2, -) -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/oslogin_v1/proto/oslogin.proto", - package="google.cloud.oslogin.v1", - syntax="proto3", - serialized_options=_b( - "\n\033com.google.cloud.oslogin.v1B\014OsLoginProtoP\001Z>google.golang.org/genproto/googleapis/cloud/oslogin/v1;oslogin\252\002\027Google.Cloud.OsLogin.V1\312\002\027Google\\Cloud\\OsLogin\\V1" - ), - serialized_pb=_b( - '\n+google/cloud/oslogin_v1/proto/oslogin.proto\x12\x17google.cloud.oslogin.v1\x1a\x1cgoogle/api/annotations.proto\x1a(google/cloud/oslogin/common/common.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto"\xa6\x02\n\x0cLoginProfile\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x41\n\x0eposix_accounts\x18\x02 \x03(\x0b\x32).google.cloud.oslogin.common.PosixAccount\x12Q\n\x0fssh_public_keys\x18\x03 \x03(\x0b\x32\x38.google.cloud.oslogin.v1.LoginProfile.SshPublicKeysEntry\x12\x11\n\tsuspended\x18\x04 \x01(\x08\x1a_\n\x12SshPublicKeysEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x38\n\x05value\x18\x02 \x01(\x0b\x32).google.cloud.oslogin.common.SshPublicKey:\x02\x38\x01")\n\x19\x44\x65letePosixAccountRequest\x12\x0c\n\x04name\x18\x01 
\x01(\t")\n\x19\x44\x65leteSshPublicKeyRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"&\n\x16GetLoginProfileRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"&\n\x16GetSshPublicKeyRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"\x82\x01\n\x19ImportSshPublicKeyRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x41\n\x0essh_public_key\x18\x02 \x01(\x0b\x32).google.cloud.oslogin.common.SshPublicKey\x12\x12\n\nproject_id\x18\x03 \x01(\t"Z\n\x1aImportSshPublicKeyResponse\x12<\n\rlogin_profile\x18\x01 \x01(\x0b\x32%.google.cloud.oslogin.v1.LoginProfile"\x9d\x01\n\x19UpdateSshPublicKeyRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x41\n\x0essh_public_key\x18\x02 \x01(\x0b\x32).google.cloud.oslogin.common.SshPublicKey\x12/\n\x0bupdate_mask\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.FieldMask2\xcd\x07\n\x0eOsLoginService\x12\x87\x01\n\x12\x44\x65letePosixAccount\x12\x32.google.cloud.oslogin.v1.DeletePosixAccountRequest\x1a\x16.google.protobuf.Empty"%\x82\xd3\xe4\x93\x02\x1f*\x1d/v1/{name=users/*/projects/*}\x12\x8c\x01\n\x12\x44\x65leteSshPublicKey\x12\x32.google.cloud.oslogin.v1.DeleteSshPublicKeyRequest\x1a\x16.google.protobuf.Empty"*\x82\xd3\xe4\x93\x02$*"/v1/{name=users/*/sshPublicKeys/*}\x12\x92\x01\n\x0fGetLoginProfile\x12/.google.cloud.oslogin.v1.GetLoginProfileRequest\x1a%.google.cloud.oslogin.v1.LoginProfile"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{name=users/*}/loginProfile\x12\x99\x01\n\x0fGetSshPublicKey\x12/.google.cloud.oslogin.v1.GetSshPublicKeyRequest\x1a).google.cloud.oslogin.common.SshPublicKey"*\x82\xd3\xe4\x93\x02$\x12"/v1/{name=users/*/sshPublicKeys/*}\x12\xbe\x01\n\x12ImportSshPublicKey\x12\x32.google.cloud.oslogin.v1.ImportSshPublicKeyRequest\x1a\x33.google.cloud.oslogin.v1.ImportSshPublicKeyResponse"?\x82\xd3\xe4\x93\x02\x39"\'/v1/{parent=users/*}:importSshPublicKey:\x0essh_public_key\x12\xaf\x01\n\x12UpdateSshPublicKey\x12\x32.google.cloud.oslogin.v1.UpdateSshPublicKeyRequest\x1a).google.cloud.oslogin.common.SshPublicKey":\x82\xd3\xe4\x93\x02\x34\x32"/v1/{name=users/*/sshPublicK
eys/*}:\x0essh_public_keyB\xa1\x01\n\x1b\x63om.google.cloud.oslogin.v1B\x0cOsLoginProtoP\x01Z>google.golang.org/genproto/googleapis/cloud/oslogin/v1;oslogin\xaa\x02\x17Google.Cloud.OsLogin.V1\xca\x02\x17Google\\Cloud\\OsLogin\\V1b\x06proto3' - ), - dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_cloud_dot_oslogin_dot_common_dot_common__pb2.DESCRIPTOR, - google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, - google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, - ], -) - - -_LOGINPROFILE_SSHPUBLICKEYSENTRY = _descriptor.Descriptor( - name="SshPublicKeysEntry", - full_name="google.cloud.oslogin.v1.LoginProfile.SshPublicKeysEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.cloud.oslogin.v1.LoginProfile.SshPublicKeysEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.cloud.oslogin.v1.LoginProfile.SshPublicKeysEntry.value", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=407, - serialized_end=502, -) - -_LOGINPROFILE = _descriptor.Descriptor( - name="LoginProfile", - full_name="google.cloud.oslogin.v1.LoginProfile", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - 
full_name="google.cloud.oslogin.v1.LoginProfile.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="posix_accounts", - full_name="google.cloud.oslogin.v1.LoginProfile.posix_accounts", - index=1, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="ssh_public_keys", - full_name="google.cloud.oslogin.v1.LoginProfile.ssh_public_keys", - index=2, - number=3, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="suspended", - full_name="google.cloud.oslogin.v1.LoginProfile.suspended", - index=3, - number=4, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_LOGINPROFILE_SSHPUBLICKEYSENTRY], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=208, - serialized_end=502, -) - - -_DELETEPOSIXACCOUNTREQUEST = _descriptor.Descriptor( - name="DeletePosixAccountRequest", - full_name="google.cloud.oslogin.v1.DeletePosixAccountRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - 
_descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.oslogin.v1.DeletePosixAccountRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=504, - serialized_end=545, -) - - -_DELETESSHPUBLICKEYREQUEST = _descriptor.Descriptor( - name="DeleteSshPublicKeyRequest", - full_name="google.cloud.oslogin.v1.DeleteSshPublicKeyRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.oslogin.v1.DeleteSshPublicKeyRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=547, - serialized_end=588, -) - - -_GETLOGINPROFILEREQUEST = _descriptor.Descriptor( - name="GetLoginProfileRequest", - full_name="google.cloud.oslogin.v1.GetLoginProfileRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.oslogin.v1.GetLoginProfileRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - 
extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=590, - serialized_end=628, -) - - -_GETSSHPUBLICKEYREQUEST = _descriptor.Descriptor( - name="GetSshPublicKeyRequest", - full_name="google.cloud.oslogin.v1.GetSshPublicKeyRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.oslogin.v1.GetSshPublicKeyRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=630, - serialized_end=668, -) - - -_IMPORTSSHPUBLICKEYREQUEST = _descriptor.Descriptor( - name="ImportSshPublicKeyRequest", - full_name="google.cloud.oslogin.v1.ImportSshPublicKeyRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.cloud.oslogin.v1.ImportSshPublicKeyRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="ssh_public_key", - full_name="google.cloud.oslogin.v1.ImportSshPublicKeyRequest.ssh_public_key", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - 
message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="project_id", - full_name="google.cloud.oslogin.v1.ImportSshPublicKeyRequest.project_id", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=671, - serialized_end=801, -) - - -_IMPORTSSHPUBLICKEYRESPONSE = _descriptor.Descriptor( - name="ImportSshPublicKeyResponse", - full_name="google.cloud.oslogin.v1.ImportSshPublicKeyResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="login_profile", - full_name="google.cloud.oslogin.v1.ImportSshPublicKeyResponse.login_profile", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=803, - serialized_end=893, -) - - -_UPDATESSHPUBLICKEYREQUEST = _descriptor.Descriptor( - name="UpdateSshPublicKeyRequest", - full_name="google.cloud.oslogin.v1.UpdateSshPublicKeyRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.oslogin.v1.UpdateSshPublicKeyRequest.name", - index=0, - number=1, 
- type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="ssh_public_key", - full_name="google.cloud.oslogin.v1.UpdateSshPublicKeyRequest.ssh_public_key", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="update_mask", - full_name="google.cloud.oslogin.v1.UpdateSshPublicKeyRequest.update_mask", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=896, - serialized_end=1053, -) - -_LOGINPROFILE_SSHPUBLICKEYSENTRY.fields_by_name[ - "value" -].message_type = google_dot_cloud_dot_oslogin_dot_common_dot_common__pb2._SSHPUBLICKEY -_LOGINPROFILE_SSHPUBLICKEYSENTRY.containing_type = _LOGINPROFILE -_LOGINPROFILE.fields_by_name[ - "posix_accounts" -].message_type = google_dot_cloud_dot_oslogin_dot_common_dot_common__pb2._POSIXACCOUNT -_LOGINPROFILE.fields_by_name[ - "ssh_public_keys" -].message_type = _LOGINPROFILE_SSHPUBLICKEYSENTRY -_IMPORTSSHPUBLICKEYREQUEST.fields_by_name[ - "ssh_public_key" -].message_type = google_dot_cloud_dot_oslogin_dot_common_dot_common__pb2._SSHPUBLICKEY -_IMPORTSSHPUBLICKEYRESPONSE.fields_by_name["login_profile"].message_type = _LOGINPROFILE 
-_UPDATESSHPUBLICKEYREQUEST.fields_by_name[ - "ssh_public_key" -].message_type = google_dot_cloud_dot_oslogin_dot_common_dot_common__pb2._SSHPUBLICKEY -_UPDATESSHPUBLICKEYREQUEST.fields_by_name[ - "update_mask" -].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK -DESCRIPTOR.message_types_by_name["LoginProfile"] = _LOGINPROFILE -DESCRIPTOR.message_types_by_name[ - "DeletePosixAccountRequest" -] = _DELETEPOSIXACCOUNTREQUEST -DESCRIPTOR.message_types_by_name[ - "DeleteSshPublicKeyRequest" -] = _DELETESSHPUBLICKEYREQUEST -DESCRIPTOR.message_types_by_name["GetLoginProfileRequest"] = _GETLOGINPROFILEREQUEST -DESCRIPTOR.message_types_by_name["GetSshPublicKeyRequest"] = _GETSSHPUBLICKEYREQUEST -DESCRIPTOR.message_types_by_name[ - "ImportSshPublicKeyRequest" -] = _IMPORTSSHPUBLICKEYREQUEST -DESCRIPTOR.message_types_by_name[ - "ImportSshPublicKeyResponse" -] = _IMPORTSSHPUBLICKEYRESPONSE -DESCRIPTOR.message_types_by_name[ - "UpdateSshPublicKeyRequest" -] = _UPDATESSHPUBLICKEYREQUEST -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -LoginProfile = _reflection.GeneratedProtocolMessageType( - "LoginProfile", - (_message.Message,), - dict( - SshPublicKeysEntry=_reflection.GeneratedProtocolMessageType( - "SshPublicKeysEntry", - (_message.Message,), - dict( - DESCRIPTOR=_LOGINPROFILE_SSHPUBLICKEYSENTRY, - __module__="google.cloud.oslogin_v1.proto.oslogin_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.oslogin.v1.LoginProfile.SshPublicKeysEntry) - ), - ), - DESCRIPTOR=_LOGINPROFILE, - __module__="google.cloud.oslogin_v1.proto.oslogin_pb2", - __doc__="""The user profile information used for logging in to a virtual machine on - Google Compute Engine. - - - Attributes: - name: - The primary email address that uniquely identifies the user. - posix_accounts: - The list of POSIX accounts associated with the user. - ssh_public_keys: - A map from SSH public key fingerprint to the associated key - object. - suspended: - Indicates if the user is suspended. 
A suspended user cannot - log in but their profile information is retained. - """, - # @@protoc_insertion_point(class_scope:google.cloud.oslogin.v1.LoginProfile) - ), -) -_sym_db.RegisterMessage(LoginProfile) -_sym_db.RegisterMessage(LoginProfile.SshPublicKeysEntry) - -DeletePosixAccountRequest = _reflection.GeneratedProtocolMessageType( - "DeletePosixAccountRequest", - (_message.Message,), - dict( - DESCRIPTOR=_DELETEPOSIXACCOUNTREQUEST, - __module__="google.cloud.oslogin_v1.proto.oslogin_pb2", - __doc__="""A request message for deleting a POSIX account entry. - - - Attributes: - name: - A reference to the POSIX account to update. POSIX accounts are - identified by the project ID they are associated with. A - reference to the POSIX account is in format - ``users/{user}/projects/{project}``. - """, - # @@protoc_insertion_point(class_scope:google.cloud.oslogin.v1.DeletePosixAccountRequest) - ), -) -_sym_db.RegisterMessage(DeletePosixAccountRequest) - -DeleteSshPublicKeyRequest = _reflection.GeneratedProtocolMessageType( - "DeleteSshPublicKeyRequest", - (_message.Message,), - dict( - DESCRIPTOR=_DELETESSHPUBLICKEYREQUEST, - __module__="google.cloud.oslogin_v1.proto.oslogin_pb2", - __doc__="""A request message for deleting an SSH public key. - - - Attributes: - name: - The fingerprint of the public key to update. Public keys are - identified by their SHA-256 fingerprint. The fingerprint of - the public key is in format - ``users/{user}/sshPublicKeys/{fingerprint}``. - """, - # @@protoc_insertion_point(class_scope:google.cloud.oslogin.v1.DeleteSshPublicKeyRequest) - ), -) -_sym_db.RegisterMessage(DeleteSshPublicKeyRequest) - -GetLoginProfileRequest = _reflection.GeneratedProtocolMessageType( - "GetLoginProfileRequest", - (_message.Message,), - dict( - DESCRIPTOR=_GETLOGINPROFILEREQUEST, - __module__="google.cloud.oslogin_v1.proto.oslogin_pb2", - __doc__="""A request message for retrieving the login profile information for a - user. 
- - - Attributes: - name: - The unique ID for the user in format ``users/{user}``. - """, - # @@protoc_insertion_point(class_scope:google.cloud.oslogin.v1.GetLoginProfileRequest) - ), -) -_sym_db.RegisterMessage(GetLoginProfileRequest) - -GetSshPublicKeyRequest = _reflection.GeneratedProtocolMessageType( - "GetSshPublicKeyRequest", - (_message.Message,), - dict( - DESCRIPTOR=_GETSSHPUBLICKEYREQUEST, - __module__="google.cloud.oslogin_v1.proto.oslogin_pb2", - __doc__="""A request message for retrieving an SSH public key. - - - Attributes: - name: - The fingerprint of the public key to retrieve. Public keys are - identified by their SHA-256 fingerprint. The fingerprint of - the public key is in format - ``users/{user}/sshPublicKeys/{fingerprint}``. - """, - # @@protoc_insertion_point(class_scope:google.cloud.oslogin.v1.GetSshPublicKeyRequest) - ), -) -_sym_db.RegisterMessage(GetSshPublicKeyRequest) - -ImportSshPublicKeyRequest = _reflection.GeneratedProtocolMessageType( - "ImportSshPublicKeyRequest", - (_message.Message,), - dict( - DESCRIPTOR=_IMPORTSSHPUBLICKEYREQUEST, - __module__="google.cloud.oslogin_v1.proto.oslogin_pb2", - __doc__="""A request message for importing an SSH public key. - - - Attributes: - parent: - The unique ID for the user in format ``users/{user}``. - ssh_public_key: - The SSH public key and expiration time. - project_id: - The project ID of the Google Cloud Platform project. - """, - # @@protoc_insertion_point(class_scope:google.cloud.oslogin.v1.ImportSshPublicKeyRequest) - ), -) -_sym_db.RegisterMessage(ImportSshPublicKeyRequest) - -ImportSshPublicKeyResponse = _reflection.GeneratedProtocolMessageType( - "ImportSshPublicKeyResponse", - (_message.Message,), - dict( - DESCRIPTOR=_IMPORTSSHPUBLICKEYRESPONSE, - __module__="google.cloud.oslogin_v1.proto.oslogin_pb2", - __doc__="""A response message for importing an SSH public key. - - - Attributes: - login_profile: - The login profile information for the user. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.oslogin.v1.ImportSshPublicKeyResponse) - ), -) -_sym_db.RegisterMessage(ImportSshPublicKeyResponse) - -UpdateSshPublicKeyRequest = _reflection.GeneratedProtocolMessageType( - "UpdateSshPublicKeyRequest", - (_message.Message,), - dict( - DESCRIPTOR=_UPDATESSHPUBLICKEYREQUEST, - __module__="google.cloud.oslogin_v1.proto.oslogin_pb2", - __doc__="""A request message for updating an SSH public key. - - - Attributes: - name: - The fingerprint of the public key to update. Public keys are - identified by their SHA-256 fingerprint. The fingerprint of - the public key is in format - ``users/{user}/sshPublicKeys/{fingerprint}``. - ssh_public_key: - The SSH public key and expiration time. - update_mask: - Mask to control which fields get updated. Updates all if not - present. - """, - # @@protoc_insertion_point(class_scope:google.cloud.oslogin.v1.UpdateSshPublicKeyRequest) - ), -) -_sym_db.RegisterMessage(UpdateSshPublicKeyRequest) - - -DESCRIPTOR._options = None -_LOGINPROFILE_SSHPUBLICKEYSENTRY._options = None - -_OSLOGINSERVICE = _descriptor.ServiceDescriptor( - name="OsLoginService", - full_name="google.cloud.oslogin.v1.OsLoginService", - file=DESCRIPTOR, - index=0, - serialized_options=None, - serialized_start=1056, - serialized_end=2029, - methods=[ - _descriptor.MethodDescriptor( - name="DeletePosixAccount", - full_name="google.cloud.oslogin.v1.OsLoginService.DeletePosixAccount", - index=0, - containing_service=None, - input_type=_DELETEPOSIXACCOUNTREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=_b( - "\202\323\344\223\002\037*\035/v1/{name=users/*/projects/*}" - ), - ), - _descriptor.MethodDescriptor( - name="DeleteSshPublicKey", - full_name="google.cloud.oslogin.v1.OsLoginService.DeleteSshPublicKey", - index=1, - containing_service=None, - input_type=_DELETESSHPUBLICKEYREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=_b( - 
'\202\323\344\223\002$*"/v1/{name=users/*/sshPublicKeys/*}' - ), - ), - _descriptor.MethodDescriptor( - name="GetLoginProfile", - full_name="google.cloud.oslogin.v1.OsLoginService.GetLoginProfile", - index=2, - containing_service=None, - input_type=_GETLOGINPROFILEREQUEST, - output_type=_LOGINPROFILE, - serialized_options=_b( - "\202\323\344\223\002!\022\037/v1/{name=users/*}/loginProfile" - ), - ), - _descriptor.MethodDescriptor( - name="GetSshPublicKey", - full_name="google.cloud.oslogin.v1.OsLoginService.GetSshPublicKey", - index=3, - containing_service=None, - input_type=_GETSSHPUBLICKEYREQUEST, - output_type=google_dot_cloud_dot_oslogin_dot_common_dot_common__pb2._SSHPUBLICKEY, - serialized_options=_b( - '\202\323\344\223\002$\022"/v1/{name=users/*/sshPublicKeys/*}' - ), - ), - _descriptor.MethodDescriptor( - name="ImportSshPublicKey", - full_name="google.cloud.oslogin.v1.OsLoginService.ImportSshPublicKey", - index=4, - containing_service=None, - input_type=_IMPORTSSHPUBLICKEYREQUEST, - output_type=_IMPORTSSHPUBLICKEYRESPONSE, - serialized_options=_b( - "\202\323\344\223\0029\"'/v1/{parent=users/*}:importSshPublicKey:\016ssh_public_key" - ), - ), - _descriptor.MethodDescriptor( - name="UpdateSshPublicKey", - full_name="google.cloud.oslogin.v1.OsLoginService.UpdateSshPublicKey", - index=5, - containing_service=None, - input_type=_UPDATESSHPUBLICKEYREQUEST, - output_type=google_dot_cloud_dot_oslogin_dot_common_dot_common__pb2._SSHPUBLICKEY, - serialized_options=_b( - '\202\323\344\223\00242"/v1/{name=users/*/sshPublicKeys/*}:\016ssh_public_key' - ), - ), - ], -) -_sym_db.RegisterServiceDescriptor(_OSLOGINSERVICE) - -DESCRIPTOR.services_by_name["OsLoginService"] = _OSLOGINSERVICE - -# @@protoc_insertion_point(module_scope) diff --git a/oslogin/google/cloud/oslogin_v1/proto/oslogin_v1/proto/oslogin_pb2_grpc.py b/oslogin/google/cloud/oslogin_v1/proto/oslogin_v1/proto/oslogin_pb2_grpc.py deleted file mode 100644 index 6b002666a67b..000000000000 --- 
a/oslogin/google/cloud/oslogin_v1/proto/oslogin_v1/proto/oslogin_pb2_grpc.py +++ /dev/null @@ -1,148 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc - -from google.cloud.oslogin_v1.proto import ( - common_pb2 as google_dot_cloud_dot_oslogin_dot_common_dot_common__pb2, -) -from google.cloud.oslogin_v1.proto import ( - oslogin_pb2 as google_dot_cloud_dot_oslogin__v1_dot_proto_dot_oslogin__pb2, -) -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 - - -class OsLoginServiceStub(object): - """Cloud OS Login API - - The Cloud OS Login API allows you to manage users and their associated SSH - public keys for logging into virtual machines on Google Cloud Platform. - """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. - """ - self.DeletePosixAccount = channel.unary_unary( - "/google.cloud.oslogin.v1.OsLoginService/DeletePosixAccount", - request_serializer=google_dot_cloud_dot_oslogin__v1_dot_proto_dot_oslogin__pb2.DeletePosixAccountRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.DeleteSshPublicKey = channel.unary_unary( - "/google.cloud.oslogin.v1.OsLoginService/DeleteSshPublicKey", - request_serializer=google_dot_cloud_dot_oslogin__v1_dot_proto_dot_oslogin__pb2.DeleteSshPublicKeyRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.GetLoginProfile = channel.unary_unary( - "/google.cloud.oslogin.v1.OsLoginService/GetLoginProfile", - request_serializer=google_dot_cloud_dot_oslogin__v1_dot_proto_dot_oslogin__pb2.GetLoginProfileRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_oslogin__v1_dot_proto_dot_oslogin__pb2.LoginProfile.FromString, - ) - self.GetSshPublicKey = channel.unary_unary( - "/google.cloud.oslogin.v1.OsLoginService/GetSshPublicKey", - 
request_serializer=google_dot_cloud_dot_oslogin__v1_dot_proto_dot_oslogin__pb2.GetSshPublicKeyRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_oslogin_dot_common_dot_common__pb2.SshPublicKey.FromString, - ) - self.ImportSshPublicKey = channel.unary_unary( - "/google.cloud.oslogin.v1.OsLoginService/ImportSshPublicKey", - request_serializer=google_dot_cloud_dot_oslogin__v1_dot_proto_dot_oslogin__pb2.ImportSshPublicKeyRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_oslogin__v1_dot_proto_dot_oslogin__pb2.ImportSshPublicKeyResponse.FromString, - ) - self.UpdateSshPublicKey = channel.unary_unary( - "/google.cloud.oslogin.v1.OsLoginService/UpdateSshPublicKey", - request_serializer=google_dot_cloud_dot_oslogin__v1_dot_proto_dot_oslogin__pb2.UpdateSshPublicKeyRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_oslogin_dot_common_dot_common__pb2.SshPublicKey.FromString, - ) - - -class OsLoginServiceServicer(object): - """Cloud OS Login API - - The Cloud OS Login API allows you to manage users and their associated SSH - public keys for logging into virtual machines on Google Cloud Platform. - """ - - def DeletePosixAccount(self, request, context): - """Deletes a POSIX account. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DeleteSshPublicKey(self, request, context): - """Deletes an SSH public key. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetLoginProfile(self, request, context): - """Retrieves the profile information used for logging in to a virtual machine - on Google Compute Engine. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetSshPublicKey(self, request, context): - """Retrieves an SSH public key. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ImportSshPublicKey(self, request, context): - """Adds an SSH public key and returns the profile information. Default POSIX - account information is set when no username and UID exist as part of the - login profile. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def UpdateSshPublicKey(self, request, context): - """Updates an SSH public key and returns the profile information. This method - supports patch semantics. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - -def add_OsLoginServiceServicer_to_server(servicer, server): - rpc_method_handlers = { - "DeletePosixAccount": grpc.unary_unary_rpc_method_handler( - servicer.DeletePosixAccount, - request_deserializer=google_dot_cloud_dot_oslogin__v1_dot_proto_dot_oslogin__pb2.DeletePosixAccountRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "DeleteSshPublicKey": grpc.unary_unary_rpc_method_handler( - servicer.DeleteSshPublicKey, - request_deserializer=google_dot_cloud_dot_oslogin__v1_dot_proto_dot_oslogin__pb2.DeleteSshPublicKeyRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "GetLoginProfile": grpc.unary_unary_rpc_method_handler( - servicer.GetLoginProfile, - 
request_deserializer=google_dot_cloud_dot_oslogin__v1_dot_proto_dot_oslogin__pb2.GetLoginProfileRequest.FromString, - response_serializer=google_dot_cloud_dot_oslogin__v1_dot_proto_dot_oslogin__pb2.LoginProfile.SerializeToString, - ), - "GetSshPublicKey": grpc.unary_unary_rpc_method_handler( - servicer.GetSshPublicKey, - request_deserializer=google_dot_cloud_dot_oslogin__v1_dot_proto_dot_oslogin__pb2.GetSshPublicKeyRequest.FromString, - response_serializer=google_dot_cloud_dot_oslogin_dot_common_dot_common__pb2.SshPublicKey.SerializeToString, - ), - "ImportSshPublicKey": grpc.unary_unary_rpc_method_handler( - servicer.ImportSshPublicKey, - request_deserializer=google_dot_cloud_dot_oslogin__v1_dot_proto_dot_oslogin__pb2.ImportSshPublicKeyRequest.FromString, - response_serializer=google_dot_cloud_dot_oslogin__v1_dot_proto_dot_oslogin__pb2.ImportSshPublicKeyResponse.SerializeToString, - ), - "UpdateSshPublicKey": grpc.unary_unary_rpc_method_handler( - servicer.UpdateSshPublicKey, - request_deserializer=google_dot_cloud_dot_oslogin__v1_dot_proto_dot_oslogin__pb2.UpdateSshPublicKeyRequest.FromString, - response_serializer=google_dot_cloud_dot_oslogin_dot_common_dot_common__pb2.SshPublicKey.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - "google.cloud.oslogin.v1.OsLoginService", rpc_method_handlers - ) - server.add_generic_rpc_handlers((generic_handler,)) diff --git a/oslogin/synth.metadata b/oslogin/synth.metadata index 56d7ccd28794..cb189d0cc1d0 100644 --- a/oslogin/synth.metadata +++ b/oslogin/synth.metadata @@ -1,26 +1,26 @@ { - "updateTime": "2019-08-06T12:33:01.454928Z", + "updateTime": "2019-10-25T21:10:32.534388Z", "sources": [ { "generator": { "name": "artman", - "version": "0.32.1", - "dockerImage": "googleapis/artman@sha256:a684d40ba9a4e15946f5f2ca6b4bd9fe301192f522e9de4fff622118775f309b" + "version": "0.40.2", + "dockerImage": "googleapis/artman@sha256:3b8f7d9b4c206843ce08053474f5c64ae4d388ff7d995e68b59fb65edf73eeb9" } 
}, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "e699b0cba64ffddfae39633417180f1f65875896", - "internalRef": "261759677" + "sha": "d27a44798506d28e8e6d874bd128da43f45f74c4", + "internalRef": "276716410" } }, { "template": { "name": "python_library", "origin": "synthtool.gcp", - "version": "2019.5.2" + "version": "2019.10.17" } } ], diff --git a/oslogin/synth.py b/oslogin/synth.py index 7e76e18b1c4e..4bcb94cadb59 100644 --- a/oslogin/synth.py +++ b/oslogin/synth.py @@ -29,9 +29,20 @@ artman_output_name="os-login-v1", include_protos=True, ) +# pb2's are incorrectly generated into deeper directories, so copy separately into proto/ +s.move( + library, + excludes=[ + "nox.py", + "setup.py", + "README.rst", + "docs/index.rst", + "**/proto/oslogin/**", + "**/proto/oslogin_v1/**", + ], +) +s.move(library / "google/cloud/oslogin_v1/proto/**/*", "google/cloud/oslogin_v1/proto") -s.move(library / "google/cloud/oslogin_v1") -s.move(library / "tests/unit/gapic/v1") # Fix up imports s.replace( diff --git a/oslogin/tests/unit/gapic/v1/test_os_login_service_client_v1.py b/oslogin/tests/unit/gapic/v1/test_os_login_service_client_v1.py index 18506c0d6859..a912a5780d3a 100644 --- a/oslogin/tests/unit/gapic/v1/test_os_login_service_client_v1.py +++ b/oslogin/tests/unit/gapic/v1/test_os_login_service_client_v1.py @@ -70,7 +70,7 @@ def test_delete_posix_account(self): client = oslogin_v1.OsLoginServiceClient() # Setup Request - name = client.project_path("[USER]", "[PROJECT]") + name = client.posix_account_path("[USER]", "[PROJECT]") client.delete_posix_account(name) @@ -88,7 +88,7 @@ def test_delete_posix_account_exception(self): client = oslogin_v1.OsLoginServiceClient() # Setup request - name = client.project_path("[USER]", "[PROJECT]") + name = client.posix_account_path("[USER]", "[PROJECT]") with pytest.raises(CustomException): client.delete_posix_account(name) @@ -101,7 +101,7 @@ def test_delete_ssh_public_key(self): client = 
oslogin_v1.OsLoginServiceClient() # Setup Request - name = client.fingerprint_path("[USER]", "[FINGERPRINT]") + name = client.ssh_public_key_path("[USER]", "[FINGERPRINT]") client.delete_ssh_public_key(name) @@ -119,7 +119,7 @@ def test_delete_ssh_public_key_exception(self): client = oslogin_v1.OsLoginServiceClient() # Setup request - name = client.fingerprint_path("[USER]", "[FINGERPRINT]") + name = client.ssh_public_key_path("[USER]", "[FINGERPRINT]") with pytest.raises(CustomException): client.delete_ssh_public_key(name) @@ -127,8 +127,7 @@ def test_delete_ssh_public_key_exception(self): def test_get_login_profile(self): # Setup Expected Response name_2 = "name2-1052831874" - suspended = False - expected_response = {"name": name_2, "suspended": suspended} + expected_response = {"name": name_2} expected_response = oslogin_pb2.LoginProfile(**expected_response) # Mock the API response @@ -168,10 +167,12 @@ def test_get_ssh_public_key(self): key = "key106079" expiration_time_usec = 2058878882 fingerprint = "fingerprint-1375934236" + name_2 = "name2-1052831874" expected_response = { "key": key, "expiration_time_usec": expiration_time_usec, "fingerprint": fingerprint, + "name": name_2, } expected_response = common_pb2.SshPublicKey(**expected_response) @@ -183,7 +184,7 @@ def test_get_ssh_public_key(self): client = oslogin_v1.OsLoginServiceClient() # Setup Request - name = client.fingerprint_path("[USER]", "[FINGERPRINT]") + name = client.ssh_public_key_path("[USER]", "[FINGERPRINT]") response = client.get_ssh_public_key(name) assert expected_response == response @@ -202,7 +203,7 @@ def test_get_ssh_public_key_exception(self): client = oslogin_v1.OsLoginServiceClient() # Setup request - name = client.fingerprint_path("[USER]", "[FINGERPRINT]") + name = client.ssh_public_key_path("[USER]", "[FINGERPRINT]") with pytest.raises(CustomException): client.get_ssh_public_key(name) @@ -221,15 +222,12 @@ def test_import_ssh_public_key(self): # Setup Request parent = 
client.user_path("[USER]") - ssh_public_key = {} - response = client.import_ssh_public_key(parent, ssh_public_key) + response = client.import_ssh_public_key(parent) assert expected_response == response assert len(channel.requests) == 1 - expected_request = oslogin_pb2.ImportSshPublicKeyRequest( - parent=parent, ssh_public_key=ssh_public_key - ) + expected_request = oslogin_pb2.ImportSshPublicKeyRequest(parent=parent) actual_request = channel.requests[0][1] assert expected_request == actual_request @@ -243,20 +241,21 @@ def test_import_ssh_public_key_exception(self): # Setup request parent = client.user_path("[USER]") - ssh_public_key = {} with pytest.raises(CustomException): - client.import_ssh_public_key(parent, ssh_public_key) + client.import_ssh_public_key(parent) def test_update_ssh_public_key(self): # Setup Expected Response key = "key106079" expiration_time_usec = 2058878882 fingerprint = "fingerprint-1375934236" + name_2 = "name2-1052831874" expected_response = { "key": key, "expiration_time_usec": expiration_time_usec, "fingerprint": fingerprint, + "name": name_2, } expected_response = common_pb2.SshPublicKey(**expected_response) @@ -268,7 +267,7 @@ def test_update_ssh_public_key(self): client = oslogin_v1.OsLoginServiceClient() # Setup Request - name = client.fingerprint_path("[USER]", "[FINGERPRINT]") + name = client.ssh_public_key_path("[USER]", "[FINGERPRINT]") ssh_public_key = {} response = client.update_ssh_public_key(name, ssh_public_key) @@ -290,7 +289,7 @@ def test_update_ssh_public_key_exception(self): client = oslogin_v1.OsLoginServiceClient() # Setup request - name = client.fingerprint_path("[USER]", "[FINGERPRINT]") + name = client.ssh_public_key_path("[USER]", "[FINGERPRINT]") ssh_public_key = {} with pytest.raises(CustomException): diff --git a/phishingprotection/CHANGELOG.md b/phishingprotection/CHANGELOG.md index 5054a4350d8f..d26be3227078 100644 --- a/phishingprotection/CHANGELOG.md +++ b/phishingprotection/CHANGELOG.md @@ -4,6 +4,34 @@ 
[1]: https://pypi.org/project/google-cloud-phishing-protection/#history + +## 0.2.0 + +10-10-2019 15:30 PDT + +### Implementation Changes +- Use correct release status. ([#9451](https://github.com/googleapis/google-cloud-python/pull/9451)) +- Remove send / receive message size limit (via synth). ([#8963](https://github.com/googleapis/google-cloud-python/pull/8963)) +- Add `client_options` support, re-template / blacken files. ([#8539](https://github.com/googleapis/google-cloud-python/pull/8539)) +- Fix dist name used to compute `gapic_version`. ([#8100](https://github.com/googleapis/google-cloud-python/pull/8100)) +- Remove retries for `DEADLINE_EXCEEDED` (via synth). ([#7889](https://github.com/googleapis/google-cloud-python/pull/7889)) + +### Dependencies +- Bump minimum version for google-api-core to 1.14.0. ([#8709](https://github.com/googleapis/google-cloud-python/pull/8709)) + +### Documentation +- Change requests intersphinx url (via synth). ([#9407](https://github.com/googleapis/google-cloud-python/pull/9407)) +- Update docstrings (via synth). ([#9350](https://github.com/googleapis/google-cloud-python/pull/9350)) +- Fix intersphinx reference to requests. ([#9294](https://github.com/googleapis/google-cloud-python/pull/9294)) +- Remove CI for gh-pages, use googleapis.dev for `api_core` refs. ([#9085](https://github.com/googleapis/google-cloud-python/pull/9085)) +- Normalize docs. ([#8994](https://github.com/googleapis/google-cloud-python/pull/8994)) +- Update intersphinx mapping for requests. ([#8805](https://github.com/googleapis/google-cloud-python/pull/8805)) +- Link to googleapis.dev documentation in READMEs. ([#8705](https://github.com/googleapis/google-cloud-python/pull/8705)) +- Add docs job to publish to googleapis.dev. ([#8464](https://github.com/googleapis/google-cloud-python/pull/8464)) + +### Internal / Testing Changes +- Pin black version (via synth). 
([#8590](https://github.com/googleapis/google-cloud-python/pull/8590)) + ## 0.1.0 04-30-2019 15:03 PDT diff --git a/phishingprotection/docs/conf.py b/phishingprotection/docs/conf.py index 3ae6ea620720..7ecb748d61e6 100644 --- a/phishingprotection/docs/conf.py +++ b/phishingprotection/docs/conf.py @@ -344,7 +344,7 @@ "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://2.python-requests.org/en/master/", None), + "requests": ("https://requests.kennethreitz.org/en/master/", None), "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } diff --git a/phishingprotection/google/cloud/phishingprotection_v1beta1/gapic/phishing_protection_service_client.py b/phishingprotection/google/cloud/phishingprotection_v1beta1/gapic/phishing_protection_service_client.py index add1fc161b35..e0b48c8f8195 100644 --- a/phishingprotection/google/cloud/phishingprotection_v1beta1/gapic/phishing_protection_service_client.py +++ b/phishingprotection/google/cloud/phishingprotection_v1beta1/gapic/phishing_protection_service_client.py @@ -206,9 +206,10 @@ def report_phishing( ): """ Reports a URI suspected of containing phishing content to be reviewed. - Once the report review is completed, if its result verifies the - existince of malicious phishing content, the site will be added the to - `Google's Social Engineering + Once the report review is complete, its result can be found in the Cloud + Security Command Center findings dashboard for Phishing Protection. If + the result verifies the existence of malicious phishing content, the + site will be added the to `Google's Social Engineering lists `__ in order to protect users that could get exposed to this threat in the future. @@ -228,7 +229,7 @@ def report_phishing( Args: parent (str): Required. 
The name of the project for which the report will be created, in the format "projects/{project\_number}". - uri (str): The URI that is being reported for phishing content to be analyzed. + uri (str): Required. The URI that is being reported for phishing content to be analyzed. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. diff --git a/phishingprotection/google/cloud/phishingprotection_v1beta1/gapic/transports/phishing_protection_service_grpc_transport.py b/phishingprotection/google/cloud/phishingprotection_v1beta1/gapic/transports/phishing_protection_service_grpc_transport.py index 8498f361b1ac..59150898b6ed 100644 --- a/phishingprotection/google/cloud/phishingprotection_v1beta1/gapic/transports/phishing_protection_service_grpc_transport.py +++ b/phishingprotection/google/cloud/phishingprotection_v1beta1/gapic/transports/phishing_protection_service_grpc_transport.py @@ -117,9 +117,10 @@ def report_phishing(self): """Return the gRPC stub for :meth:`PhishingProtectionServiceClient.report_phishing`. Reports a URI suspected of containing phishing content to be reviewed. - Once the report review is completed, if its result verifies the - existince of malicious phishing content, the site will be added the to - `Google's Social Engineering + Once the report review is complete, its result can be found in the Cloud + Security Command Center findings dashboard for Phishing Protection. If + the result verifies the existence of malicious phishing content, the + site will be added the to `Google's Social Engineering lists `__ in order to protect users that could get exposed to this threat in the future. 
diff --git a/phishingprotection/google/cloud/phishingprotection_v1beta1/proto/phishingprotection.proto b/phishingprotection/google/cloud/phishingprotection_v1beta1/proto/phishingprotection.proto index 62a9fd03e3df..ce0c3624d818 100644 --- a/phishingprotection/google/cloud/phishingprotection_v1beta1/proto/phishingprotection.proto +++ b/phishingprotection/google/cloud/phishingprotection_v1beta1/proto/phishingprotection.proto @@ -18,6 +18,9 @@ syntax = "proto3"; package google.cloud.phishingprotection.v1beta1; import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; option csharp_namespace = "Google.Cloud.PhishingProtection.V1Beta1"; option go_package = "google.golang.org/genproto/googleapis/cloud/phishingprotection/v1beta1;phishingprotection"; @@ -29,17 +32,22 @@ option php_namespace = "Google\\Cloud\\PhishingProtection\\V1beta1"; // Service to report phishing URIs. service PhishingProtectionServiceV1Beta1 { + option (google.api.default_host) = "phishingprotection.googleapis.com"; + option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; + // Reports a URI suspected of containing phishing content to be reviewed. Once - // the report review is completed, if its result verifies the existince of - // malicious phishing content, the site will be added the to [Google's Social - // Engineering lists](https://support.google.com/webmasters/answer/6350487/) - // in order to protect users that could get exposed to this threat in - // the future. + // the report review is complete, its result can be found in the Cloud + // Security Command Center findings dashboard for Phishing Protection. 
If the + // result verifies the existence of malicious phishing content, the site will + // be added the to [Google's Social Engineering + // lists](https://support.google.com/webmasters/answer/6350487/) in order to + // protect users that could get exposed to this threat in the future. rpc ReportPhishing(ReportPhishingRequest) returns (ReportPhishingResponse) { option (google.api.http) = { post: "/v1beta1/{parent=projects/*}/phishing:report" body: "*" }; + option (google.api.method_signature) = "parent,uri"; } } @@ -47,11 +55,18 @@ service PhishingProtectionServiceV1Beta1 { message ReportPhishingRequest { // Required. The name of the project for which the report will be created, // in the format "projects/{project_number}". - string parent = 1; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "cloudresourcemanager.googleapis.com/Project" + } + ]; - // The URI that is being reported for phishing content to be analyzed. - string uri = 2; + // Required. The URI that is being reported for phishing content to be analyzed. + string uri = 2 [(google.api.field_behavior) = REQUIRED]; } // The ReportPhishing (empty) response message. 
-message ReportPhishingResponse {} +message ReportPhishingResponse { + +} diff --git a/phishingprotection/google/cloud/phishingprotection_v1beta1/proto/phishingprotection_pb2.py b/phishingprotection/google/cloud/phishingprotection_v1beta1/proto/phishingprotection_pb2.py index 91b33898f9cf..72631da0878f 100644 --- a/phishingprotection/google/cloud/phishingprotection_v1beta1/proto/phishingprotection_pb2.py +++ b/phishingprotection/google/cloud/phishingprotection_v1beta1/proto/phishingprotection_pb2.py @@ -16,6 +16,9 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 DESCRIPTOR = _descriptor.FileDescriptor( @@ -26,9 +29,14 @@ "\n%com.google.phishingprotection.v1beta1B\027PhishingProtectionProtoP\001ZYgoogle.golang.org/genproto/googleapis/cloud/phishingprotection/v1beta1;phishingprotection\242\002\004GCPP\252\002'Google.Cloud.PhishingProtection.V1Beta1\312\002'Google\\Cloud\\PhishingProtection\\V1beta1" ), serialized_pb=_b( - '\nFgoogle/cloud/phishingprotection_v1beta1/proto/phishingprotection.proto\x12\'google.cloud.phishingprotection.v1beta1\x1a\x1cgoogle/api/annotations.proto"4\n\x15ReportPhishingRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x0b\n\x03uri\x18\x02 \x01(\t"\x18\n\x16ReportPhishingResponse2\xef\x01\n 
PhishingProtectionServiceV1Beta1\x12\xca\x01\n\x0eReportPhishing\x12>.google.cloud.phishingprotection.v1beta1.ReportPhishingRequest\x1a?.google.cloud.phishingprotection.v1beta1.ReportPhishingResponse"7\x82\xd3\xe4\x93\x02\x31",/v1beta1/{parent=projects/*}/phishing:report:\x01*B\xf8\x01\n%com.google.phishingprotection.v1beta1B\x17PhishingProtectionProtoP\x01ZYgoogle.golang.org/genproto/googleapis/cloud/phishingprotection/v1beta1;phishingprotection\xa2\x02\x04GCPP\xaa\x02\'Google.Cloud.PhishingProtection.V1Beta1\xca\x02\'Google\\Cloud\\PhishingProtection\\V1beta1b\x06proto3' + '\nFgoogle/cloud/phishingprotection_v1beta1/proto/phishingprotection.proto\x12\'google.cloud.phishingprotection.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto"n\n\x15ReportPhishingRequest\x12\x43\n\x06parent\x18\x01 \x01(\tB3\xe0\x41\x02\xfa\x41-\n+cloudresourcemanager.googleapis.com/Project\x12\x10\n\x03uri\x18\x02 \x01(\tB\x03\xe0\x41\x02"\x18\n\x16ReportPhishingResponse2\xd3\x02\n PhishingProtectionServiceV1Beta1\x12\xd7\x01\n\x0eReportPhishing\x12>.google.cloud.phishingprotection.v1beta1.ReportPhishingRequest\x1a?.google.cloud.phishingprotection.v1beta1.ReportPhishingResponse"D\x82\xd3\xe4\x93\x02\x31",/v1beta1/{parent=projects/*}/phishing:report:\x01*\xda\x41\nparent,uri\x1aU\xca\x41!phishingprotection.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB\xf8\x01\n%com.google.phishingprotection.v1beta1B\x17PhishingProtectionProtoP\x01ZYgoogle.golang.org/genproto/googleapis/cloud/phishingprotection/v1beta1;phishingprotection\xa2\x02\x04GCPP\xaa\x02\'Google.Cloud.PhishingProtection.V1Beta1\xca\x02\'Google\\Cloud\\PhishingProtection\\V1beta1b\x06proto3' ), - dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR], + dependencies=[ + google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, + 
google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, + google_dot_api_dot_resource__pb2.DESCRIPTOR, + ], ) @@ -54,7 +62,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A-\n+cloudresourcemanager.googleapis.com/Project" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -72,7 +82,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -84,8 +94,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=145, - serialized_end=197, + serialized_start=230, + serialized_end=340, ) @@ -104,8 +114,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=199, - serialized_end=223, + serialized_start=342, + serialized_end=366, ) DESCRIPTOR.message_types_by_name["ReportPhishingRequest"] = _REPORTPHISHINGREQUEST @@ -126,8 +136,8 @@ Required. The name of the project for which the report will be created, in the format "projects/{project\_number}". uri: - The URI that is being reported for phishing content to be - analyzed. + Required. The URI that is being reported for phishing content + to be analyzed. 
""", # @@protoc_insertion_point(class_scope:google.cloud.phishingprotection.v1beta1.ReportPhishingRequest) ), @@ -149,15 +159,19 @@ DESCRIPTOR._options = None +_REPORTPHISHINGREQUEST.fields_by_name["parent"]._options = None +_REPORTPHISHINGREQUEST.fields_by_name["uri"]._options = None _PHISHINGPROTECTIONSERVICEV1BETA1 = _descriptor.ServiceDescriptor( name="PhishingProtectionServiceV1Beta1", full_name="google.cloud.phishingprotection.v1beta1.PhishingProtectionServiceV1Beta1", file=DESCRIPTOR, index=0, - serialized_options=None, - serialized_start=226, - serialized_end=465, + serialized_options=_b( + "\312A!phishingprotection.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" + ), + serialized_start=369, + serialized_end=708, methods=[ _descriptor.MethodDescriptor( name="ReportPhishing", @@ -167,7 +181,7 @@ input_type=_REPORTPHISHINGREQUEST, output_type=_REPORTPHISHINGRESPONSE, serialized_options=_b( - '\202\323\344\223\0021",/v1beta1/{parent=projects/*}/phishing:report:\001*' + '\202\323\344\223\0021",/v1beta1/{parent=projects/*}/phishing:report:\001*\332A\nparent,uri' ), ) ], diff --git a/phishingprotection/google/cloud/phishingprotection_v1beta1/proto/phishingprotection_pb2_grpc.py b/phishingprotection/google/cloud/phishingprotection_v1beta1/proto/phishingprotection_pb2_grpc.py index 30c8d4c4b948..26465d3729c2 100644 --- a/phishingprotection/google/cloud/phishingprotection_v1beta1/proto/phishingprotection_pb2_grpc.py +++ b/phishingprotection/google/cloud/phishingprotection_v1beta1/proto/phishingprotection_pb2_grpc.py @@ -29,11 +29,12 @@ class PhishingProtectionServiceV1Beta1Servicer(object): def ReportPhishing(self, request, context): """Reports a URI suspected of containing phishing content to be reviewed. 
Once - the report review is completed, if its result verifies the existince of - malicious phishing content, the site will be added the to [Google's Social - Engineering lists](https://support.google.com/webmasters/answer/6350487/) - in order to protect users that could get exposed to this threat in - the future. + the report review is complete, its result can be found in the Cloud + Security Command Center findings dashboard for Phishing Protection. If the + result verifies the existence of malicious phishing content, the site will + be added the to [Google's Social Engineering + lists](https://support.google.com/webmasters/answer/6350487/) in order to + protect users that could get exposed to this threat in the future. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") diff --git a/phishingprotection/setup.py b/phishingprotection/setup.py index d837cfe294af..711d727c0470 100644 --- a/phishingprotection/setup.py +++ b/phishingprotection/setup.py @@ -21,8 +21,8 @@ name = "google-cloud-phishing-protection" description = "Phishing Protection API API client library" -version = "0.1.0" -release_status = "3 - Alpha" +version = "0.2.0" +release_status = "Development Status :: 3 - Alpha" dependencies = [ "google-api-core[grpc] >= 1.14.0, < 2.0.0dev", 'enum34; python_version < "3.4"', diff --git a/phishingprotection/synth.metadata b/phishingprotection/synth.metadata index 0ddff81eb85a..9d709c78872a 100644 --- a/phishingprotection/synth.metadata +++ b/phishingprotection/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-08-06T12:33:35.867649Z", + "updateTime": "2019-10-05T12:30:18.605559Z", "sources": [ { "generator": { "name": "artman", - "version": "0.32.1", - "dockerImage": "googleapis/artman@sha256:a684d40ba9a4e15946f5f2ca6b4bd9fe301192f522e9de4fff622118775f309b" + "version": "0.38.0", + "dockerImage": "googleapis/artman@sha256:0d2f8d429110aeb8d82df6550ef4ede59d40df9062d260a1580fce688b0512bf" } }, { "git": { "name": 
"googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "e699b0cba64ffddfae39633417180f1f65875896", - "internalRef": "261759677" + "sha": "ceb8e2fb12f048cc94caae532ef0b4cf026a78f3", + "internalRef": "272971705" } }, { diff --git a/pubsub/CHANGELOG.md b/pubsub/CHANGELOG.md index 347266bd85dc..09716f05a9cf 100644 --- a/pubsub/CHANGELOG.md +++ b/pubsub/CHANGELOG.md @@ -4,6 +4,30 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## 1.0.2 + +09-30-2019 11:57 PDT + + +### Implementation Changes + +- Streaming pull shouldn't need `subscriptions.get` permission ([#9360](https://github.com/googleapis/google-cloud-python/pull/9360)). + +## 1.0.1 + +09-27-2019 07:01 PDT + + +### Implementation Changes +- Set default stream ACK deadline to subscriptions'. ([#9268](https://github.com/googleapis/google-cloud-python/pull/9268)) + +### Documentation +- Fix intersphinx reference to requests. ([#9294](https://github.com/googleapis/google-cloud-python/pull/9294)) +- Link to correct TimeoutError in futures docs. ([#9216](https://github.com/googleapis/google-cloud-python/pull/9216)) + +### Internal / Testing Changes +- Adjust messaging RPC timeout settings (via synth). 
[#9279](https://github.com/googleapis/google-cloud-python/pull/9279) + ## 1.0.0 08-29-2019 09:27 PDT diff --git a/pubsub/docs/conf.py b/pubsub/docs/conf.py index cc9198fc4b3b..472fe878fe8f 100644 --- a/pubsub/docs/conf.py +++ b/pubsub/docs/conf.py @@ -338,7 +338,7 @@ "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://2.python-requests.org/en/master/", None), + "requests": ("https://requests.kennethreitz.org/en/stable/", None), "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } diff --git a/pubsub/google/cloud/pubsub_v1/futures.py b/pubsub/google/cloud/pubsub_v1/futures.py index 21d5d810199f..ba861e40c653 100644 --- a/pubsub/google/cloud/pubsub_v1/futures.py +++ b/pubsub/google/cloud/pubsub_v1/futures.py @@ -74,9 +74,7 @@ def running(self): bool: ``True`` if this method has not yet completed, or ``False`` if it has completed. """ - if self.done(): - return False - return True + return not self.done() def done(self): """Return True the future is done, False otherwise. @@ -94,7 +92,7 @@ def result(self, timeout=None): times out and raises TimeoutError. Raises: - ~.pubsub_v1.TimeoutError: If the request times out. + concurrent.futures.TimeoutError: If the request times out. Exception: For undefined exceptions in the underlying call execution. """ @@ -114,7 +112,7 @@ def exception(self, timeout=None): times out and raises TimeoutError. Raises: - TimeoutError: If the request times out. + concurrent.futures.TimeoutError: If the request times out. Returns: Exception: The exception raised by the call, if any. 
diff --git a/pubsub/google/cloud/pubsub_v1/gapic/publisher_client_config.py b/pubsub/google/cloud/pubsub_v1/gapic/publisher_client_config.py index 33f0af827924..1b8982b639b2 100644 --- a/pubsub/google/cloud/pubsub_v1/gapic/publisher_client_config.py +++ b/pubsub/google/cloud/pubsub_v1/gapic/publisher_client_config.py @@ -29,9 +29,9 @@ "initial_retry_delay_millis": 100, "retry_delay_multiplier": 1.3, "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 25000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 30000, + "initial_rpc_timeout_millis": 5000, + "rpc_timeout_multiplier": 1.3, + "max_rpc_timeout_millis": 600000, "total_timeout_millis": 600000, }, }, diff --git a/pubsub/google/cloud/pubsub_v1/gapic/subscriber_client_config.py b/pubsub/google/cloud/pubsub_v1/gapic/subscriber_client_config.py index 0aa68315c1c0..083a6c19a440 100644 --- a/pubsub/google/cloud/pubsub_v1/gapic/subscriber_client_config.py +++ b/pubsub/google/cloud/pubsub_v1/gapic/subscriber_client_config.py @@ -20,9 +20,9 @@ "initial_retry_delay_millis": 100, "retry_delay_multiplier": 1.3, "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 25000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 25000, + "initial_rpc_timeout_millis": 5000, + "rpc_timeout_multiplier": 1.3, + "max_rpc_timeout_millis": 600000, "total_timeout_millis": 600000, }, "streaming_messaging": { diff --git a/pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py b/pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py index 117ee12b8463..726e93166cda 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py @@ -74,6 +74,9 @@ def __init__(self, client, topic, settings, autocommit=True): self._state_lock = threading.Lock() # These members are all communicated between threads; ensure that # any writes to them use the "state lock" to remain atomic. 
+ # _futures list should remain unchanged after batch + # status changed from ACCEPTING_MESSAGES to any other + # in order to avoid race conditions self._futures = [] self._messages = [] self._size = 0 diff --git a/pubsub/google/cloud/pubsub_v1/publisher/client.py b/pubsub/google/cloud/pubsub_v1/publisher/client.py index 05a4161e889a..60a03bb652ab 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -134,6 +134,7 @@ def __init__(self, batch_settings=(), **kwargs): # messages. One batch exists for each topic. self._batch_lock = self._batch_class.make_lock() self._batches = {} + self._is_stopped = False @classmethod def from_service_account_file(cls, filename, batch_settings=(), **kwargs): @@ -187,20 +188,19 @@ def _batch(self, topic, create=False, autocommit=True): """ # If there is no matching batch yet, then potentially create one # and place it on the batches dictionary. - with self._batch_lock: - if not create: - batch = self._batches.get(topic) - if batch is None: - create = True - - if create: - batch = self._batch_class( - autocommit=autocommit, - client=self, - settings=self.batch_settings, - topic=topic, - ) - self._batches[topic] = batch + if not create: + batch = self._batches.get(topic) + if batch is None: + create = True + + if create: + batch = self._batch_class( + autocommit=autocommit, + client=self, + settings=self.batch_settings, + topic=topic, + ) + self._batches[topic] = batch return batch @@ -242,12 +242,17 @@ def publish(self, topic, data, **attrs): instance that conforms to Python Standard library's :class:`~concurrent.futures.Future` interface (but not an instance of that class). + + Raises: + RuntimeError: + If called after publisher has been stopped + by a `stop()` method call. """ # Sanity check: Is the data being sent as a bytestring? # If it is literally anything else, complain loudly about it. 
if not isinstance(data, six.binary_type): raise TypeError( - "Data being published to Pub/Sub must be sent " "as a bytestring." + "Data being published to Pub/Sub must be sent as a bytestring." ) # Coerce all attributes to text strings. @@ -266,11 +271,38 @@ def publish(self, topic, data, **attrs): message = types.PubsubMessage(data=data, attributes=attrs) # Delegate the publishing to the batch. - batch = self._batch(topic) - future = None - while future is None: - future = batch.publish(message) - if future is None: - batch = self._batch(topic, create=True) + with self._batch_lock: + if self._is_stopped: + raise RuntimeError("Cannot publish on a stopped publisher.") + + batch = self._batch(topic) + future = None + while future is None: + future = batch.publish(message) + if future is None: + batch = self._batch(topic, create=True) return future + + def stop(self): + """Immediately publish all outstanding messages. + + Asynchronously sends all outstanding messages and + prevents future calls to `publish()`. Method should + be invoked prior to deleting this `Client()` object + in order to ensure that no pending messages are lost. + + .. note:: + + This method is non-blocking. Use `Future()` objects + returned by `publish()` to make sure all publish + requests completed, either in success or error. + """ + with self._batch_lock: + if self._is_stopped: + raise RuntimeError("Cannot stop a publisher already stopped.") + + self._is_stopped = True + + for batch in self._batches.values(): + batch.commit() diff --git a/pubsub/google/cloud/pubsub_v1/publisher/futures.py b/pubsub/google/cloud/pubsub_v1/publisher/futures.py index ed200041177b..fa8a79998617 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/futures.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/futures.py @@ -39,7 +39,7 @@ def result(self, timeout=None): str: The message ID. Raises: - ~.pubsub_v1.TimeoutError: If the request times out. + concurrent.futures.TimeoutError: If the request times out. 
Exception: For undefined exceptions in the underlying call execution. """ diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index af6883fd067e..d3b1d6f51eb6 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -44,12 +44,20 @@ exceptions.GatewayTimeout, exceptions.Aborted, ) +_TERMINATING_STREAM_ERRORS = (exceptions.Cancelled,) _MAX_LOAD = 1.0 """The load threshold above which to pause the incoming message stream.""" _RESUME_THRESHOLD = 0.8 """The load threshold below which to resume the incoming message stream.""" +_DEFAULT_STREAM_ACK_DEADLINE = 60 +"""The default message acknowledge deadline in seconds for incoming message stream. + +This default deadline is dynamically modified for the messages that are added +to the lease management. +""" + def _maybe_wrap_exception(exception): """Wraps a gRPC exception class, if needed.""" @@ -208,7 +216,7 @@ def load(self): float: The load value. """ if self._leaser is None: - return 0 + return 0.0 return max( [ @@ -384,14 +392,36 @@ def open(self, callback, on_callback_error): ) # Create the RPC + + # We must use a fixed value for the ACK deadline, as we cannot read it + # from the subscription. The latter would require `pubsub.subscriptions.get` + # permission, which is not granted to the default subscriber role + # `roles/pubsub.subscriber`. + # See also https://github.com/googleapis/google-cloud-python/issues/9339 + # + # When dynamic lease management is enabled for the "on hold" messages, + # the default stream ACK deadline should again be set based on the + # historic ACK timing data, i.e. `self.ack_histogram.percentile(99)`. 
+ stream_ack_deadline_seconds = _DEFAULT_STREAM_ACK_DEADLINE + + get_initial_request = functools.partial( + self._get_initial_request, stream_ack_deadline_seconds + ) self._rpc = bidi.ResumableBidiRpc( start_rpc=self._client.api.streaming_pull, - initial_request=self._get_initial_request, + initial_request=get_initial_request, should_recover=self._should_recover, + should_terminate=self._should_terminate, throttle_reopen=True, ) self._rpc.add_done_callback(self._on_rpc_done) + _LOGGER.debug( + "Creating a stream, default ACK deadline set to {} seconds.".format( + stream_ack_deadline_seconds + ) + ) + # Create references to threads self._dispatcher = dispatcher.Dispatcher(self, self._scheduler.queue) self._consumer = bidi.BackgroundConsumer(self._rpc, self._on_response) @@ -462,12 +492,16 @@ def close(self, reason=None): for callback in self._close_callbacks: callback(self, reason) - def _get_initial_request(self): + def _get_initial_request(self, stream_ack_deadline_seconds): """Return the initial request for the RPC. This defines the initial request that must always be sent to Pub/Sub immediately upon opening the subscription. + Args: + stream_ack_deadline_seconds (int): + The default message acknowledge deadline for the stream. + Returns: google.cloud.pubsub_v1.types.StreamingPullRequest: A request suitable for being the first request on the stream (and not @@ -486,7 +520,7 @@ def _get_initial_request(self): request = types.StreamingPullRequest( modify_deadline_ack_ids=list(lease_ids), modify_deadline_seconds=[self.ack_deadline] * len(lease_ids), - stream_ack_deadline_seconds=self.ack_histogram.percentile(99), + stream_ack_deadline_seconds=stream_ack_deadline_seconds, subscription=self._subscription, ) @@ -511,14 +545,6 @@ def _on_response(self, response): self._messages_on_hold.qsize(), ) - # Immediately modack the messages we received, as this tells the server - # that we've received them. 
- items = [ - requests.ModAckRequest(message.ack_id, self._ack_histogram.percentile(99)) - for message in response.received_messages - ] - self._dispatcher.modify_ack_deadline(items) - invoke_callbacks_for = [] for received_message in response.received_messages: @@ -535,6 +561,15 @@ def _on_response(self, response): else: self._messages_on_hold.put(message) + # Immediately (i.e. without waiting for the auto lease management) + # modack the messages we received and not put on hold, as this tells + # the server that we've received them. + items = [ + requests.ModAckRequest(message.ack_id, self._ack_histogram.percentile(99)) + for message in invoke_callbacks_for + ] + self._dispatcher.modify_ack_deadline(items) + _LOGGER.debug( "Scheduling callbacks for %s new messages, new total on hold %s.", len(invoke_callbacks_for), @@ -565,6 +600,26 @@ def _should_recover(self, exception): _LOGGER.info("Observed non-recoverable stream error %s", exception) return False + def _should_terminate(self, exception): + """Determine if an error on the RPC stream should be terminated. + + If the exception is one of the terminating exceptions, this will signal + to the consumer thread that it should terminate. + + This will cause the stream to exit when it returns :data:`True`. + + Returns: + bool: Indicates if the caller should terminate or attempt recovery. + Will be :data:`True` if the ``exception`` is "acceptable", i.e. + in a list of terminating exceptions. + """ + exception = _maybe_wrap_exception(exception) + if isinstance(exception, _TERMINATING_STREAM_ERRORS): + _LOGGER.info("Observed terminating stream error %s", exception) + return True + _LOGGER.info("Observed non-terminating stream error %s", exception) + return False + def _on_rpc_done(self, future): """Triggered whenever the underlying RPC terminates without recovery. 
diff --git a/pubsub/setup.py b/pubsub/setup.py index 9a13e8ae0f2c..45e2cc04c07d 100644 --- a/pubsub/setup.py +++ b/pubsub/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-pubsub" description = "Google Cloud Pub/Sub API client library" -version = "1.0.0" +version = "1.0.2" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' diff --git a/pubsub/synth.metadata b/pubsub/synth.metadata index 1686bce7e232..9dfcec7b72e4 100644 --- a/pubsub/synth.metadata +++ b/pubsub/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-08-24T12:27:35.100766Z", + "updateTime": "2019-09-24T12:27:36.347590Z", "sources": [ { "generator": { "name": "artman", - "version": "0.34.0", - "dockerImage": "googleapis/artman@sha256:38a27ba6245f96c3e86df7acb2ebcc33b4f186d9e475efe2d64303aec3d4e0ea" + "version": "0.37.0", + "dockerImage": "googleapis/artman@sha256:0f66008f69061ea6d41499e2a34da3fc64fc7c9798077e3a37158653a135d801" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "b97af5f7fea49d533900b62cca171da0e49743de", - "internalRef": "265156479" + "sha": "fe6115fdfae318277426ec0e11b4b05e2b150723", + "internalRef": "270882829" } }, { diff --git a/pubsub/tests/system.py b/pubsub/tests/system.py index 7ffb4a580194..fd7473e1e53b 100644 --- a/pubsub/tests/system.py +++ b/pubsub/tests/system.py @@ -17,6 +17,7 @@ import datetime import itertools import operator as op +import os import threading import time @@ -381,6 +382,59 @@ class CallbackError(Exception): with pytest.raises(CallbackError): future.result(timeout=30) + @pytest.mark.xfail( + reason="The default stream ACK deadline is static and received messages " + "exceeding FlowControl.max_messages are currently not lease managed." + ) + def test_streaming_pull_ack_deadline( + self, publisher, subscriber, project, topic_path, subscription_path, cleanup + ): + # Make sure the topic and subscription get deleted. 
+ cleanup.append((publisher.delete_topic, topic_path)) + cleanup.append((subscriber.delete_subscription, subscription_path)) + + # Create a topic and a subscription, then subscribe to the topic. This + # must happen before the messages are published. + publisher.create_topic(topic_path) + + # Subscribe to the topic. This must happen before the messages + # are published. + subscriber.create_subscription( + subscription_path, topic_path, ack_deadline_seconds=240 + ) + + # publish some messages and wait for completion + self._publish_messages(publisher, topic_path, batch_sizes=[2]) + + # subscribe to the topic + callback = StreamingPullCallback( + processing_time=70, # more than the default stream ACK deadline (60s) + resolve_at_msg_count=3, # one more than the published messages count + ) + flow_control = types.FlowControl(max_messages=1) + subscription_future = subscriber.subscribe( + subscription_path, callback, flow_control=flow_control + ) + + # We expect to process the first two messages in 2 * 70 seconds, and + # any duplicate message that is re-sent by the backend in additional + # 70 seconds, totalling 210 seconds (+ overhead) --> if there have been + # no duplicates in 240 seconds, we can reasonably assume that there + # won't be any. 
+ try: + callback.done_future.result(timeout=240) + except exceptions.TimeoutError: + # future timed out, because we received no excessive messages + assert sorted(callback.seen_message_ids) == [1, 2] + else: + pytest.fail( + "Expected to receive 2 messages, but got at least {}.".format( + len(callback.seen_message_ids) + ) + ) + finally: + subscription_future.cancel() + def test_streaming_pull_max_messages( self, publisher, topic_path, subscriber, subscription_path, cleanup ): @@ -435,6 +489,45 @@ def test_streaming_pull_max_messages( finally: subscription_future.cancel() # trigger clean shutdown + @pytest.mark.skipif( + "KOKORO_GFILE_DIR" not in os.environ, + reason="Requires Kokoro environment with a limited subscriber service account.", + ) + def test_streaming_pull_subscriber_permissions_sufficient( + self, publisher, topic_path, subscriber, subscription_path, cleanup + ): + + # Make sure the topic and subscription get deleted. + cleanup.append((publisher.delete_topic, topic_path)) + cleanup.append((subscriber.delete_subscription, subscription_path)) + + # create a topic and subscribe to it + publisher.create_topic(topic_path) + subscriber.create_subscription(subscription_path, topic_path) + + # A service account granting only the pubsub.subscriber role must be used. + filename = os.path.join( + os.environ["KOKORO_GFILE_DIR"], "pubsub-subscriber-service-account.json" + ) + streaming_pull_subscriber = type(subscriber).from_service_account_file(filename) + + # Subscribe to the topic, publish a message, and verify that subscriber + # successfully pulls and processes it. + callback = StreamingPullCallback(processing_time=0.01, resolve_at_msg_count=1) + future = streaming_pull_subscriber.subscribe(subscription_path, callback) + self._publish_messages(publisher, topic_path, batch_sizes=[1]) + + try: + callback.done_future.result(timeout=10) + except exceptions.TimeoutError: + pytest.fail( + "Timeout: receiving/processing streamed messages took too long." 
+ ) + else: + assert 1 in callback.seen_message_ids + finally: + future.cancel() + def _publish_messages(self, publisher, topic_path, batch_sizes): """Publish ``count`` messages in batches and wait until completion.""" publish_futures = [] diff --git a/pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py b/pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py index 05e4c8c67209..6519b2b23149 100644 --- a/pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py +++ b/pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py @@ -201,6 +201,36 @@ def test_publish_attrs_type_error(): client.publish(topic, b"foo", answer=42) +def test_stop(): + creds = mock.Mock(spec=credentials.Credentials) + client = publisher.Client(credentials=creds) + + batch = client._batch("topic1", autocommit=False) + batch2 = client._batch("topic2", autocommit=False) + + pubsub_msg = types.PubsubMessage(data=b"msg") + + patch = mock.patch.object(batch, "commit") + patch2 = mock.patch.object(batch2, "commit") + + with patch as commit_mock, patch2 as commit_mock2: + batch.publish(pubsub_msg) + batch2.publish(pubsub_msg) + + client.stop() + + # check if commit() called + commit_mock.assert_called() + commit_mock2.assert_called() + + # check that closed publisher doesn't accept new messages + with pytest.raises(RuntimeError): + client.publish("topic1", b"msg2") + + with pytest.raises(RuntimeError): + client.stop() + + def test_gapic_instance_method(): creds = mock.Mock(spec=credentials.Credentials) client = publisher.Client(credentials=creds) diff --git a/pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index 877ccf97fd9a..a69ea5ca5268 100644 --- a/pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -404,6 +404,8 @@ def test_heartbeat_inactive(): 
"google.cloud.pubsub_v1.subscriber._protocol.heartbeater.Heartbeater", autospec=True ) def test_open(heartbeater, dispatcher, leaser, background_consumer, resumable_bidi_rpc): + stream_ack_deadline = streaming_pull_manager._DEFAULT_STREAM_ACK_DEADLINE + manager = make_manager() manager.open(mock.sentinel.callback, mock.sentinel.on_callback_error) @@ -426,10 +428,16 @@ def test_open(heartbeater, dispatcher, leaser, background_consumer, resumable_bi resumable_bidi_rpc.assert_called_once_with( start_rpc=manager._client.api.streaming_pull, - initial_request=manager._get_initial_request, + initial_request=mock.ANY, should_recover=manager._should_recover, + should_terminate=manager._should_terminate, throttle_reopen=True, ) + initial_request_arg = resumable_bidi_rpc.call_args.kwargs["initial_request"] + assert initial_request_arg.func == manager._get_initial_request + assert initial_request_arg.args[0] == stream_ack_deadline + assert not manager._client.api.get_subscription.called + resumable_bidi_rpc.return_value.add_done_callback.assert_called_once_with( manager._on_rpc_done ) @@ -574,11 +582,11 @@ def test__get_initial_request(): manager._leaser = mock.create_autospec(leaser.Leaser, instance=True) manager._leaser.ack_ids = ["1", "2"] - initial_request = manager._get_initial_request() + initial_request = manager._get_initial_request(123) assert isinstance(initial_request, types.StreamingPullRequest) assert initial_request.subscription == "subscription-name" - assert initial_request.stream_ack_deadline_seconds == 10 + assert initial_request.stream_ack_deadline_seconds == 123 assert initial_request.modify_deadline_ack_ids == ["1", "2"] assert initial_request.modify_deadline_seconds == [10, 10] @@ -587,11 +595,11 @@ def test__get_initial_request_wo_leaser(): manager = make_manager() manager._leaser = None - initial_request = manager._get_initial_request() + initial_request = manager._get_initial_request(123) assert isinstance(initial_request, types.StreamingPullRequest) 
assert initial_request.subscription == "subscription-name" - assert initial_request.stream_ack_deadline_seconds == 10 + assert initial_request.stream_ack_deadline_seconds == 123 assert initial_request.modify_deadline_ack_ids == [] assert initial_request.modify_deadline_seconds == [] @@ -660,12 +668,10 @@ def test__on_response_with_leaser_overload(): # are called in the expected way. manager._on_response(response) + # only the messages that are added to the lease management and dispatched to + # callbacks should have their ACK deadline extended dispatcher.modify_ack_deadline.assert_called_once_with( - [ - requests.ModAckRequest("fack", 10), - requests.ModAckRequest("back", 10), - requests.ModAckRequest("zack", 10), - ] + [requests.ModAckRequest("fack", 10)] ) # one message should be scheduled, the leaser capacity allows for it @@ -719,6 +725,23 @@ def test__should_recover_false(): assert manager._should_recover(exc) is False +def test__should_terminate_true(): + manager = make_manager() + + details = "Cancelled. Go away, before I taunt you a second time." + exc = exceptions.Cancelled(details) + + assert manager._should_terminate(exc) is True + + +def test__should_terminate_false(): + manager = make_manager() + + exc = TypeError("wahhhhhh") + + assert manager._should_terminate(exc) is False + + @mock.patch("threading.Thread", autospec=True) def test__on_rpc_done(thread): manager = make_manager() diff --git a/recommender/.coveragerc b/recommender/.coveragerc new file mode 100644 index 000000000000..b178b094aa1d --- /dev/null +++ b/recommender/.coveragerc @@ -0,0 +1,19 @@ +# Generated by synthtool. DO NOT EDIT! 
+[run] +branch = True + +[report] +fail_under = 100 +show_missing = True +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ + # Ignore abstract methods + raise NotImplementedError +omit = + */gapic/*.py + */proto/*.py + */core/*.py + */site-packages/*.py \ No newline at end of file diff --git a/recommender/.flake8 b/recommender/.flake8 new file mode 100644 index 000000000000..0268ecc9c55c --- /dev/null +++ b/recommender/.flake8 @@ -0,0 +1,14 @@ +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + *_pb2.py + + # Standard linting exemptions. + __pycache__, + .git, + *.pyc, + conf.py diff --git a/recommender/.repo-metadata.json b/recommender/.repo-metadata.json new file mode 100644 index 000000000000..b3598c302da7 --- /dev/null +++ b/recommender/.repo-metadata.json @@ -0,0 +1,13 @@ +{ + "name": "recommender", + "name_pretty": "Cloud Recommender API", + "product_documentation": "https://cloud.google.com/recommender", + "client_documentation": "https://googleapis.dev/python/recommender/latest", + "issue_tracker": "", + "release_level": "alpha", + "language": "python", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-cloud-recommender", + "api_id": "recommender.googleapis.com", + "requires_billing": true +} \ No newline at end of file diff --git a/recommender/CHANGELOG.md b/recommender/CHANGELOG.md new file mode 100644 index 000000000000..b59d7ccaa076 --- /dev/null +++ b/recommender/CHANGELOG.md @@ -0,0 +1,13 @@ +# Changelog + +[PyPI History][1] + +[1]: https://pypi.org/project/google-cloud-recommender/#history + +## 0.1.0 + +09-27-2019 12:20 PDT + +### New Features +- initial release of v1beta1 ([#9257](https://github.com/googleapis/google-cloud-python/pull/9257)) + diff --git a/recommender/LICENSE b/recommender/LICENSE new file mode 100644 index 000000000000..a8ee855de2aa --- /dev/null 
+++ b/recommender/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + https://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/recommender/MANIFEST.in b/recommender/MANIFEST.in new file mode 100644 index 000000000000..9cbf175afe6b --- /dev/null +++ b/recommender/MANIFEST.in @@ -0,0 +1,5 @@ +include README.rst LICENSE +recursive-include google *.json *.proto +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ diff --git a/recommender/README.rst b/recommender/README.rst new file mode 100644 index 000000000000..a9eff8727c19 --- /dev/null +++ b/recommender/README.rst @@ -0,0 +1,80 @@ +Python Client for Recommender API (`Alpha`_) +============================================ + +`Recommender API`_: + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. _Alpha: https://github.com/googleapis/google-cloud-python/blob/master/README.rst +.. 
_Recommender API: https://cloud.google.com/recommender +.. _Client Library Documentation: https://googleapis.dev/python/recommender/latest +.. _Product Documentation: https://cloud.google.com/recommender/docs + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the Recommender API.`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the Recommender API.: https://cloud.google.com/recommender +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Supported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^ +Python >= 3.5 + + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + pip install virtualenv + virtualenv + source /bin/activate + /bin/pip install google-cloud-recommender + + +Windows +^^^^^^^ + +.. code-block:: console + + pip install virtualenv + virtualenv + \Scripts\activate + \Scripts\pip.exe install google-cloud-recommender + +Next Steps +~~~~~~~~~~ + +- Read the `Client Library Documentation`_ for Recommender API + API to see other available methods on the client. +- Read the `Recommender API Product documentation`_ to learn + more about the product and see How-to Guides. 
+- View this `repository’s main README`_ to see the full list of Cloud + APIs that we cover. + +.. _Recommender API Product documentation: https://cloud.google.com/recommender +.. _repository’s main README: https://github.com/googleapis/google-cloud-python/blob/master/README.rst diff --git a/recommender/docs/README.rst b/recommender/docs/README.rst new file mode 120000 index 000000000000..89a0106941ff --- /dev/null +++ b/recommender/docs/README.rst @@ -0,0 +1 @@ +../README.rst \ No newline at end of file diff --git a/recommender/docs/conf.py b/recommender/docs/conf.py new file mode 100644 index 000000000000..fea4c9ab3b66 --- /dev/null +++ b/recommender/docs/conf.py @@ -0,0 +1,363 @@ +# -*- coding: utf-8 -*- +# +# google-cloud-recommender documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +__version__ = "0.1.0" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.6.3" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. 
+extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_flags = ["members"] +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# Allow markdown includes (so releases.md can include CHANGLEOG.md) +# http://www.sphinx-doc.org/en/master/markdown.html +source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The master toctree document. +master_doc = "index" + +# General information about the project. +project = u"google-cloud-recommender" +copyright = u"2017, Google" +author = u"Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. 
+# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ["_build"] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for Python", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. 
If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +# html_static_path = [] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. 
Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-recommender-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. 
List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + master_doc, + "google-cloud-recommender.tex", + u"google-cloud-recommender Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + master_doc, + "google-cloud-recommender", + u"google-cloud-recommender Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + master_doc, + "google-cloud-recommender", + u"google-cloud-recommender Documentation", + author, + "google-cloud-recommender", + "GAPIC library for the {metadata.shortName} v1beta1 service", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. 
+# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("https://requests.kennethreitz.org/en/master/", None), + "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), + "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/recommender/docs/gapic/v1beta1/api.rst b/recommender/docs/gapic/v1beta1/api.rst new file mode 100644 index 000000000000..d9458d258e2a --- /dev/null +++ b/recommender/docs/gapic/v1beta1/api.rst @@ -0,0 +1,6 @@ +Client for Recommender API +========================== + +.. automodule:: google.cloud.recommender_v1beta1 + :members: + :inherited-members: \ No newline at end of file diff --git a/recommender/docs/gapic/v1beta1/types.rst b/recommender/docs/gapic/v1beta1/types.rst new file mode 100644 index 000000000000..3771da42e95f --- /dev/null +++ b/recommender/docs/gapic/v1beta1/types.rst @@ -0,0 +1,5 @@ +Types for Recommender API Client +================================ + +.. 
automodule:: google.cloud.recommender_v1beta1.types + :members: \ No newline at end of file diff --git a/recommender/docs/index.rst b/recommender/docs/index.rst new file mode 100644 index 000000000000..37c319bfaffe --- /dev/null +++ b/recommender/docs/index.rst @@ -0,0 +1,9 @@ +.. include:: README.rst + +Api Reference +------------- +.. toctree:: + :maxdepth: 2 + + gapic/v1beta1/api + gapic/v1beta1/types \ No newline at end of file diff --git a/recommender/google/__init__.py b/recommender/google/__init__.py new file mode 100644 index 000000000000..8fcc60e2b9c6 --- /dev/null +++ b/recommender/google/__init__.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +try: + import pkg_resources + + pkg_resources.declare_namespace(__name__) +except ImportError: + import pkgutil + + __path__ = pkgutil.extend_path(__path__, __name__) diff --git a/recommender/google/cloud/__init__.py b/recommender/google/cloud/__init__.py new file mode 100644 index 000000000000..8fcc60e2b9c6 --- /dev/null +++ b/recommender/google/cloud/__init__.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +try: + import pkg_resources + + pkg_resources.declare_namespace(__name__) +except ImportError: + import pkgutil + + __path__ = pkgutil.extend_path(__path__, __name__) diff --git a/recommender/google/cloud/recommender.py b/recommender/google/cloud/recommender.py new file mode 100644 index 000000000000..3caae5792568 --- /dev/null +++ b/recommender/google/cloud/recommender.py @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +from __future__ import absolute_import + +from google.cloud.recommender_v1beta1 import RecommenderClient +from google.cloud.recommender_v1beta1 import enums +from google.cloud.recommender_v1beta1 import types + + +__all__ = ("enums", "types", "RecommenderClient") diff --git a/recommender/google/cloud/recommender_v1beta1/__init__.py b/recommender/google/cloud/recommender_v1beta1/__init__.py new file mode 100644 index 000000000000..2ea08cfa4d18 --- /dev/null +++ b/recommender/google/cloud/recommender_v1beta1/__init__.py @@ -0,0 +1,30 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +from __future__ import absolute_import + +from google.cloud.recommender_v1beta1 import types +from google.cloud.recommender_v1beta1.gapic import enums +from google.cloud.recommender_v1beta1.gapic import recommender_client + + +class RecommenderClient(recommender_client.RecommenderClient): + __doc__ = recommender_client.RecommenderClient.__doc__ + enums = enums + + +__all__ = ("enums", "types", "RecommenderClient") diff --git a/recommender/google/cloud/recommender_v1beta1/gapic/__init__.py b/recommender/google/cloud/recommender_v1beta1/gapic/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/recommender/google/cloud/recommender_v1beta1/gapic/enums.py b/recommender/google/cloud/recommender_v1beta1/gapic/enums.py new file mode 100644 index 000000000000..5d7fbb6b7a31 --- /dev/null +++ b/recommender/google/cloud/recommender_v1beta1/gapic/enums.py @@ -0,0 +1,88 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Wrappers for protocol buffer enum types.""" + +import enum + + +class NullValue(enum.IntEnum): + """ + ``NullValue`` is a singleton enumeration to represent the null value for + the ``Value`` type union. + + The JSON representation for ``NullValue`` is JSON ``null``. + + Attributes: + NULL_VALUE (int): Null value. + """ + + NULL_VALUE = 0 + + +class Impact(object): + class Category(enum.IntEnum): + """ + The category of the impact. 
+ + Attributes: + CATEGORY_UNSPECIFIED (int): Default unspecified category. Don't use directly. + COST (int): Indicates a potential increase or decrease in cost. + SECURITY (int): Indicates a potential increase or decrease in security. + PERFORMANCE (int): Indicates a potential increase or decrease in performance. + """ + + CATEGORY_UNSPECIFIED = 0 + COST = 1 + SECURITY = 2 + PERFORMANCE = 3 + + +class RecommendationStateInfo(object): + class State(enum.IntEnum): + """ + Represents Recommendation State + + Attributes: + STATE_UNSPECIFIED (int): Default state. Don't use directly. + ACTIVE (int): Recommendation is active and can be applied. Recommendations content can + be updated by Google. + + ACTIVE recommendations can be marked as CLAIMED, SUCCEEDED, or FAILED. + CLAIMED (int): Recommendation is in claimed state. Recommendations content is + immutable and cannot be updated by Google. + + CLAIMED recommendations can be marked as CLAIMED, SUCCEEDED, or FAILED. + SUCCEEDED (int): Recommendation is in succeeded state. Recommendations content is + immutable and cannot be updated by Google. + + SUCCEEDED recommendations can be marked as SUCCEEDED, or FAILED. + FAILED (int): Recommendation is in failed state. Recommendations content is immutable + and cannot be updated by Google. + + FAILED recommendations can be marked as SUCCEEDED, or FAILED. + DISMISSED (int): Recommendation is in dismissed state. Recommendation content can be + updated by Google. + + DISMISSED recommendations can be marked as ACTIVE. 
+ """ + + STATE_UNSPECIFIED = 0 + ACTIVE = 1 + CLAIMED = 6 + SUCCEEDED = 3 + FAILED = 4 + DISMISSED = 5 diff --git a/recommender/google/cloud/recommender_v1beta1/gapic/recommender_client.py b/recommender/google/cloud/recommender_v1beta1/gapic/recommender_client.py new file mode 100644 index 000000000000..deb485bb83f3 --- /dev/null +++ b/recommender/google/cloud/recommender_v1beta1/gapic/recommender_client.py @@ -0,0 +1,670 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Accesses the google.cloud.recommender.v1beta1 Recommender API.""" + +import functools +import pkg_resources +import warnings + +from google.oauth2 import service_account +import google.api_core.client_options +import google.api_core.gapic_v1.client_info +import google.api_core.gapic_v1.config +import google.api_core.gapic_v1.method +import google.api_core.gapic_v1.routing_header +import google.api_core.grpc_helpers +import google.api_core.page_iterator +import google.api_core.path_template +import grpc + +from google.cloud.recommender_v1beta1.gapic import enums +from google.cloud.recommender_v1beta1.gapic import recommender_client_config +from google.cloud.recommender_v1beta1.gapic.transports import recommender_grpc_transport +from google.cloud.recommender_v1beta1.proto import recommendation_pb2 +from google.cloud.recommender_v1beta1.proto import recommender_service_pb2 +from google.cloud.recommender_v1beta1.proto import recommender_service_pb2_grpc + + +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( + "google-cloud-recommender" +).version + + +class RecommenderClient(object): + """ + Provides recommendations for cloud customers for various categories like + performance optimization, cost savings, reliability, feature discovery, etc. + These recommendations are generated automatically based on analysis of user + resources, configuration and monitoring metrics. + """ + + SERVICE_ADDRESS = "recommender.googleapis.com:443" + """The default address of the service.""" + + # The name of the interface for this client. This is the key used to + # find the method configuration in the client_config dictionary. + _INTERFACE_NAME = "google.cloud.recommender.v1beta1.Recommender" + + @classmethod + def from_service_account_file(cls, filename, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. 
+ args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RecommenderClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @classmethod + def recommendation_path(cls, project, location, recommender, recommendation): + """Return a fully-qualified recommendation string.""" + return google.api_core.path_template.expand( + "projects/{project}/locations/{location}/recommenders/{recommender}/recommendations/{recommendation}", + project=project, + location=location, + recommender=recommender, + recommendation=recommendation, + ) + + @classmethod + def recommender_path(cls, project, location, recommender): + """Return a fully-qualified recommender string.""" + return google.api_core.path_template.expand( + "projects/{project}/locations/{location}/recommenders/{recommender}", + project=project, + location=location, + recommender=recommender, + ) + + def __init__( + self, + transport=None, + channel=None, + credentials=None, + client_config=None, + client_info=None, + client_options=None, + ): + """Constructor. + + Args: + transport (Union[~.RecommenderGrpcTransport, + Callable[[~.Credentials, type], ~.RecommenderGrpcTransport]): A transport + instance, responsible for actually making the API calls. + The default transport uses the gRPC protocol. + This argument may also be a callable which returns a + transport instance. Callables will be sent the credentials + as the first argument and the default transport class as + the second argument. + channel (grpc.Channel): DEPRECATED. A ``Channel`` instance + through which to make calls. This argument is mutually exclusive + with ``credentials``; providing both will raise an exception. 
+ credentials (google.auth.credentials.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is mutually exclusive with providing a + transport instance to ``transport``; doing so will raise + an exception. + client_config (dict): DEPRECATED. A dictionary of call options for + each method. If not specified, the default configuration is used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + client_options (Union[dict, google.api_core.client_options.ClientOptions]): + Client options used to set user options on the client. API Endpoint + should be set through client_options. + """ + # Raise deprecation warnings for things we want to go away. + if client_config is not None: + warnings.warn( + "The `client_config` argument is deprecated.", + PendingDeprecationWarning, + stacklevel=2, + ) + else: + client_config = recommender_client_config.config + + if channel: + warnings.warn( + "The `channel` argument is deprecated; use " "`transport` instead.", + PendingDeprecationWarning, + stacklevel=2, + ) + + api_endpoint = self.SERVICE_ADDRESS + if client_options: + if type(client_options) == dict: + client_options = google.api_core.client_options.from_dict( + client_options + ) + if client_options.api_endpoint: + api_endpoint = client_options.api_endpoint + + # Instantiate the transport. + # The transport is responsible for handling serialization and + # deserialization and actually sending data to the service. 
+ if transport: + if callable(transport): + self.transport = transport( + credentials=credentials, + default_class=recommender_grpc_transport.RecommenderGrpcTransport, + address=api_endpoint, + ) + else: + if credentials: + raise ValueError( + "Received both a transport instance and " + "credentials; these are mutually exclusive." + ) + self.transport = transport + else: + self.transport = recommender_grpc_transport.RecommenderGrpcTransport( + address=api_endpoint, channel=channel, credentials=credentials + ) + + if client_info is None: + client_info = google.api_core.gapic_v1.client_info.ClientInfo( + gapic_version=_GAPIC_LIBRARY_VERSION + ) + else: + client_info.gapic_version = _GAPIC_LIBRARY_VERSION + self._client_info = client_info + + # Parse out the default settings for retry and timeout for each RPC + # from the client configuration. + # (Ordinarily, these are the defaults specified in the `*_config.py` + # file next to this one.) + self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( + client_config["interfaces"][self._INTERFACE_NAME] + ) + + # Save a dictionary of cached API call functions. + # These are the actual callables which invoke the proper + # transport methods, wrapped with `wrap_method` to add retry, + # timeout, and the like. + self._inner_api_calls = {} + + # Service calls + def list_recommendations( + self, + parent, + page_size=None, + filter_=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Lists recommendations for a Cloud project. Requires the + recommender.\*.list IAM permission for the specified recommender. + + Example: + >>> from google.cloud import recommender_v1beta1 + >>> + >>> client = recommender_v1beta1.RecommenderClient() + >>> + >>> parent = client.recommender_path('[PROJECT]', '[LOCATION]', '[RECOMMENDER]') + >>> + >>> # Iterate over all results + >>> for element in client.list_recommendations(parent): + ... 
# process element + ... pass + >>> + >>> + >>> # Alternatively: + >>> + >>> # Iterate over results one page at a time + >>> for page in client.list_recommendations(parent).pages: + ... for element in page: + ... # process element + ... pass + + Args: + parent (str): Required. The container resource on which to execute the request. + Acceptable formats: + + 1. + + "projects/[PROJECT\_NUMBER]/locations/[LOCATION]/recommenders/[RECOMMENDER\_ID]", + + LOCATION here refers to GCP Locations: + https://cloud.google.com/about/locations/ + page_size (int): The maximum number of resources contained in the + underlying API response. If page streaming is performed per- + resource, this parameter does not affect the return value. If page + streaming is performed per-page, this determines the maximum number + of resources in a page. + filter_ (str): Filter expression to restrict the recommendations returned. Supported + filter fields: state\_info.state Eg: \`state\_info.state:"DISMISSED" or + state\_info.state:"FAILED" + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.api_core.page_iterator.PageIterator` instance. + An iterable of :class:`~google.cloud.recommender_v1beta1.types.Recommendation` instances. + You can also iterate over the pages of the response + using its `pages` property. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. 
+ ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "list_recommendations" not in self._inner_api_calls: + self._inner_api_calls[ + "list_recommendations" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.list_recommendations, + default_retry=self._method_configs["ListRecommendations"].retry, + default_timeout=self._method_configs["ListRecommendations"].timeout, + client_info=self._client_info, + ) + + request = recommender_service_pb2.ListRecommendationsRequest( + parent=parent, page_size=page_size, filter=filter_ + ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("parent", parent)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + iterator = google.api_core.page_iterator.GRPCIterator( + client=None, + method=functools.partial( + self._inner_api_calls["list_recommendations"], + retry=retry, + timeout=timeout, + metadata=metadata, + ), + request=request, + items_field="recommendations", + request_token_field="page_token", + response_token_field="next_page_token", + ) + return iterator + + def get_recommendation( + self, + name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Gets the requested recommendation. Requires the recommender.\*.get IAM + permission for the specified recommender. + + Example: + >>> from google.cloud import recommender_v1beta1 + >>> + >>> client = recommender_v1beta1.RecommenderClient() + >>> + >>> name = client.recommendation_path('[PROJECT]', '[LOCATION]', '[RECOMMENDER]', '[RECOMMENDATION]') + >>> + >>> response = client.get_recommendation(name) + + Args: + name (str): Name of the recommendation. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. 
If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.recommender_v1beta1.types.Recommendation` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "get_recommendation" not in self._inner_api_calls: + self._inner_api_calls[ + "get_recommendation" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.get_recommendation, + default_retry=self._method_configs["GetRecommendation"].retry, + default_timeout=self._method_configs["GetRecommendation"].timeout, + client_info=self._client_info, + ) + + request = recommender_service_pb2.GetRecommendationRequest(name=name) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("name", name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + return self._inner_api_calls["get_recommendation"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def mark_recommendation_claimed( + self, + name, + etag, + state_metadata=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Mark the Recommendation State as Claimed. 
Users can use this method to + indicate to the Recommender API that they are starting to apply the + recommendation themselves. This stops the recommendation content from + being updated. + + MarkRecommendationClaimed can be applied to recommendations in CLAIMED, + SUCCEEDED, FAILED, or ACTIVE state. + + Requires the recommender.\*.update IAM permission for the specified + recommender. + + Example: + >>> from google.cloud import recommender_v1beta1 + >>> + >>> client = recommender_v1beta1.RecommenderClient() + >>> + >>> name = client.recommendation_path('[PROJECT]', '[LOCATION]', '[RECOMMENDER]', '[RECOMMENDATION]') + >>> + >>> # TODO: Initialize `etag`: + >>> etag = '' + >>> + >>> response = client.mark_recommendation_claimed(name, etag) + + Args: + name (str): Name of the recommendation. + etag (str): Fingerprint of the Recommendation. Provides optimistic locking. + state_metadata (dict[str -> str]): State properties to include with this state. Overwrites any existing + ``state_metadata``. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.recommender_v1beta1.types.Recommendation` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
+ if "mark_recommendation_claimed" not in self._inner_api_calls: + self._inner_api_calls[ + "mark_recommendation_claimed" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.mark_recommendation_claimed, + default_retry=self._method_configs["MarkRecommendationClaimed"].retry, + default_timeout=self._method_configs[ + "MarkRecommendationClaimed" + ].timeout, + client_info=self._client_info, + ) + + request = recommender_service_pb2.MarkRecommendationClaimedRequest( + name=name, etag=etag, state_metadata=state_metadata + ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("name", name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + return self._inner_api_calls["mark_recommendation_claimed"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def mark_recommendation_succeeded( + self, + name, + etag, + state_metadata=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Mark the Recommendation State as Succeeded. Users can use this method to + indicate to the Recommender API that they have applied the + recommendation themselves, and the operation was successful. This stops + the recommendation content from being updated. + + MarkRecommendationSucceeded can be applied to recommendations in ACTIVE, + CLAIMED, SUCCEEDED, or FAILED state. + + Requires the recommender.\*.update IAM permission for the specified + recommender. 
+ + Example: + >>> from google.cloud import recommender_v1beta1 + >>> + >>> client = recommender_v1beta1.RecommenderClient() + >>> + >>> name = client.recommendation_path('[PROJECT]', '[LOCATION]', '[RECOMMENDER]', '[RECOMMENDATION]') + >>> + >>> # TODO: Initialize `etag`: + >>> etag = '' + >>> + >>> response = client.mark_recommendation_succeeded(name, etag) + + Args: + name (str): Name of the recommendation. + etag (str): Fingerprint of the Recommendation. Provides optimistic locking. + state_metadata (dict[str -> str]): State properties to include with this state. Overwrites any existing + ``state_metadata``. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.recommender_v1beta1.types.Recommendation` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
+ if "mark_recommendation_succeeded" not in self._inner_api_calls: + self._inner_api_calls[ + "mark_recommendation_succeeded" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.mark_recommendation_succeeded, + default_retry=self._method_configs["MarkRecommendationSucceeded"].retry, + default_timeout=self._method_configs[ + "MarkRecommendationSucceeded" + ].timeout, + client_info=self._client_info, + ) + + request = recommender_service_pb2.MarkRecommendationSucceededRequest( + name=name, etag=etag, state_metadata=state_metadata + ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("name", name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + return self._inner_api_calls["mark_recommendation_succeeded"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def mark_recommendation_failed( + self, + name, + etag, + state_metadata=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Mark the Recommendation State as Failed. Users can use this method to + indicate to the Recommender API that they have applied the + recommendation themselves, and the operation failed. This stops the + recommendation content from being updated. + + MarkRecommendationFailed can be applied to recommendations in ACTIVE, + CLAIMED, SUCCEEDED, or FAILED state. + + Requires the recommender.\*.update IAM permission for the specified + recommender. 
+ + Example: + >>> from google.cloud import recommender_v1beta1 + >>> + >>> client = recommender_v1beta1.RecommenderClient() + >>> + >>> name = client.recommendation_path('[PROJECT]', '[LOCATION]', '[RECOMMENDER]', '[RECOMMENDATION]') + >>> + >>> # TODO: Initialize `etag`: + >>> etag = '' + >>> + >>> response = client.mark_recommendation_failed(name, etag) + + Args: + name (str): Name of the recommendation. + etag (str): Fingerprint of the Recommendation. Provides optimistic locking. + state_metadata (dict[str -> str]): State properties to include with this state. Overwrites any existing + ``state_metadata``. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.recommender_v1beta1.types.Recommendation` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
+ if "mark_recommendation_failed" not in self._inner_api_calls: + self._inner_api_calls[ + "mark_recommendation_failed" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.mark_recommendation_failed, + default_retry=self._method_configs["MarkRecommendationFailed"].retry, + default_timeout=self._method_configs[ + "MarkRecommendationFailed" + ].timeout, + client_info=self._client_info, + ) + + request = recommender_service_pb2.MarkRecommendationFailedRequest( + name=name, etag=etag, state_metadata=state_metadata + ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("name", name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + return self._inner_api_calls["mark_recommendation_failed"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) diff --git a/recommender/google/cloud/recommender_v1beta1/gapic/recommender_client_config.py b/recommender/google/cloud/recommender_v1beta1/gapic/recommender_client_config.py new file mode 100644 index 000000000000..a1a36482eecd --- /dev/null +++ b/recommender/google/cloud/recommender_v1beta1/gapic/recommender_client_config.py @@ -0,0 +1,48 @@ +config = { + "interfaces": { + "google.cloud.recommender.v1beta1.Recommender": { + "retry_codes": { + "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], + "non_idempotent": [], + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 20000, + "rpc_timeout_multiplier": 1.0, + "max_rpc_timeout_millis": 20000, + "total_timeout_millis": 600000, + } + }, + "methods": { + "ListRecommendations": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, + "GetRecommendation": { + "timeout_millis": 60000, + "retry_codes_name": 
"idempotent", + "retry_params_name": "default", + }, + "MarkRecommendationClaimed": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + }, + "MarkRecommendationSucceeded": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + }, + "MarkRecommendationFailed": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + }, + }, + } + } +} diff --git a/recommender/google/cloud/recommender_v1beta1/gapic/transports/__init__.py b/recommender/google/cloud/recommender_v1beta1/gapic/transports/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/recommender/google/cloud/recommender_v1beta1/gapic/transports/recommender_grpc_transport.py b/recommender/google/cloud/recommender_v1beta1/gapic/transports/recommender_grpc_transport.py new file mode 100644 index 000000000000..cc3af3d44211 --- /dev/null +++ b/recommender/google/cloud/recommender_v1beta1/gapic/transports/recommender_grpc_transport.py @@ -0,0 +1,202 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import google.api_core.grpc_helpers + +from google.cloud.recommender_v1beta1.proto import recommender_service_pb2_grpc + + +class RecommenderGrpcTransport(object): + """gRPC transport class providing stubs for + google.cloud.recommender.v1beta1 Recommender API. 
+ + The transport provides access to the raw gRPC stubs, + which can be used to take advantage of advanced + features of gRPC. + """ + + # The scopes needed to make gRPC calls to all of the methods defined + # in this service. + _OAUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + def __init__( + self, channel=None, credentials=None, address="recommender.googleapis.com:443" + ): + """Instantiate the transport class. + + Args: + channel (grpc.Channel): A ``Channel`` instance through + which to make calls. This argument is mutually exclusive + with ``credentials``; providing both will raise an exception. + credentials (google.auth.credentials.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If none + are specified, the client will attempt to ascertain the + credentials from the environment. + address (str): The address where the service is hosted. + """ + # If both `channel` and `credentials` are specified, raise an + # exception (channels come with credentials baked in already). + if channel is not None and credentials is not None: + raise ValueError( + "The `channel` and `credentials` arguments are mutually " "exclusive." + ) + + # Create the channel. + if channel is None: + channel = self.create_channel( + address=address, + credentials=credentials, + options={ + "grpc.max_send_message_length": -1, + "grpc.max_receive_message_length": -1, + }.items(), + ) + + self._channel = channel + + # gRPC uses objects called "stubs" that are bound to the + # channel and provide a basic method for each RPC. + self._stubs = { + "recommender_stub": recommender_service_pb2_grpc.RecommenderStub(channel) + } + + @classmethod + def create_channel( + cls, address="recommender.googleapis.com:443", credentials=None, **kwargs + ): + """Create and return a gRPC channel object. + + Args: + address (str): The host for the channel to use. 
+ credentials (~.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + kwargs (dict): Keyword arguments, which are passed to the + channel creation. + + Returns: + grpc.Channel: A gRPC channel object. + """ + return google.api_core.grpc_helpers.create_channel( + address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs + ) + + @property + def channel(self): + """The gRPC channel used by the transport. + + Returns: + grpc.Channel: A gRPC channel object. + """ + return self._channel + + @property + def list_recommendations(self): + """Return the gRPC stub for :meth:`RecommenderClient.list_recommendations`. + + Lists recommendations for a Cloud project. Requires the + recommender.\*.list IAM permission for the specified recommender. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["recommender_stub"].ListRecommendations + + @property + def get_recommendation(self): + """Return the gRPC stub for :meth:`RecommenderClient.get_recommendation`. + + Gets the requested recommendation. Requires the recommender.\*.get IAM + permission for the specified recommender. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["recommender_stub"].GetRecommendation + + @property + def mark_recommendation_claimed(self): + """Return the gRPC stub for :meth:`RecommenderClient.mark_recommendation_claimed`. + + Mark the Recommendation State as Claimed. Users can use this method to + indicate to the Recommender API that they are starting to apply the + recommendation themselves. This stops the recommendation content from + being updated. 
+ + MarkRecommendationClaimed can be applied to recommendations in CLAIMED, + SUCCEEDED, FAILED, or ACTIVE state. + + Requires the recommender.\*.update IAM permission for the specified + recommender. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["recommender_stub"].MarkRecommendationClaimed + + @property + def mark_recommendation_succeeded(self): + """Return the gRPC stub for :meth:`RecommenderClient.mark_recommendation_succeeded`. + + Mark the Recommendation State as Succeeded. Users can use this method to + indicate to the Recommender API that they have applied the + recommendation themselves, and the operation was successful. This stops + the recommendation content from being updated. + + MarkRecommendationSucceeded can be applied to recommendations in ACTIVE, + CLAIMED, SUCCEEDED, or FAILED state. + + Requires the recommender.\*.update IAM permission for the specified + recommender. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["recommender_stub"].MarkRecommendationSucceeded + + @property + def mark_recommendation_failed(self): + """Return the gRPC stub for :meth:`RecommenderClient.mark_recommendation_failed`. + + Mark the Recommendation State as Failed. Users can use this method to + indicate to the Recommender API that they have applied the + recommendation themselves, and the operation failed. This stops the + recommendation content from being updated. + + MarkRecommendationFailed can be applied to recommendations in ACTIVE, + CLAIMED, SUCCEEDED, or FAILED state. + + Requires the recommender.\*.update IAM permission for the specified + recommender. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. 
+ """ + return self._stubs["recommender_stub"].MarkRecommendationFailed diff --git a/recommender/google/cloud/recommender_v1beta1/proto/__init__.py b/recommender/google/cloud/recommender_v1beta1/proto/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/recommender/google/cloud/recommender_v1beta1/proto/recommendation.proto b/recommender/google/cloud/recommender_v1beta1/proto/recommendation.proto new file mode 100644 index 000000000000..8980de0c4274 --- /dev/null +++ b/recommender/google/cloud/recommender_v1beta1/proto/recommendation.proto @@ -0,0 +1,234 @@ +// Copyright 2019 Google LLC. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +syntax = "proto3"; + +package google.cloud.recommender.v1beta1; + +import "google/protobuf/duration.proto"; +import "google/protobuf/struct.proto"; +import "google/protobuf/timestamp.proto"; +import "google/type/money.proto"; + +option csharp_namespace = "Google.Cloud.Recommender.V1Beta1"; +option go_package = "google.golang.org/genproto/googleapis/cloud/recommender/v1beta1;recommender"; +option java_multiple_files = true; +option java_package = "com.google.cloud.recommender.v1beta1"; +option objc_class_prefix = "CREC"; + +// A recommendation along with a suggested action. E.g., a rightsizing +// recommendation for an underutilized VM, IAM role recommendations, etc +message Recommendation { + // Name of recommendation. 
+ // + // A project recommendation is represented as + // projects/[PROJECT_NUMBER]/locations/[LOCATION]/recommenders/[RECOMMENDER_ID]/recommendations/[RECOMMENDATION_ID] + string name = 1; + + // Free-form human readable summary in English. The maximum length is 500 + // characters. + string description = 2; + + // Contains an identifier for a subtype of recommendations produced for the + // same recommender. Subtype is a function of content and impact, meaning a + // new subtype will be added when either content or primary impact category + // changes. + // + // Examples: + // For recommender = "google.iam.policy.RoleRecommender", + // recommender_subtype can be one of "REMOVE_ROLE"/"REPLACE_ROLE" + string recommender_subtype = 12; + + // Last time this recommendation was refreshed by the system that created it + // in the first place. + google.protobuf.Timestamp last_refresh_time = 4; + + // The primary impact that this recommendation can have while trying to + // optimize for one category. + Impact primary_impact = 5; + + // Optional set of additional impact that this recommendation may have when + // trying to optimize for the primary category. These may be positive + // or negative. + repeated Impact additional_impact = 6; + + // Content of the recommendation describing recommended changes to resources. + RecommendationContent content = 7; + + // Information for state. Contains state and metadata. + RecommendationStateInfo state_info = 10; + + // Fingerprint of the Recommendation. Provides optimistic locking when + // updating states. + string etag = 11; +} + +// Contains what resources are changing and how they are changing. +message RecommendationContent { + // Operations to one or more Google Cloud resources grouped in such a way + // that, all operations within one group are expected to be performed + // atomically and in an order. + repeated OperationGroup operation_groups = 2; +} + +// Group of operations that need to be performed atomically. 
+message OperationGroup { + // List of operations across one or more resources that belong to this group. + // Loosely based on RFC6902 and should be performed in the order they appear. + repeated Operation operations = 1; +} + +// Contains an operation for a resource inspired by the JSON-PATCH format with +// support for: +// * Custom filters for describing partial array patch. +// * Extended path values for describing nested arrays. +// * Custom fields for describing the resource for which the operation is being +// described. +// * Allows extension to custom operations not natively supported by RFC6902. +// See https://tools.ietf.org/html/rfc6902 for details on the original RFC. +message Operation { + // Type of this operation. Contains one of 'and', 'remove', 'replace', 'move', + // 'copy', 'test' and custom operations. This field is case-insensitive and + // always populated. + string action = 1; + + // Type of GCP resource being modified/tested. This field is always populated. + // Example: cloudresourcemanager.googleapis.com/Project, + // compute.googleapis.com/Instance + string resource_type = 2; + + // Contains the fully qualified resource name. This field is always populated. + // ex: //cloudresourcemanager.googleapis.com/projects/foo. + string resource = 3; + + // Path to the target field being operated on. If the operation is at the + // resource level, then path should be "/". This field is always populated. + string path = 4; + + // Can be set with action 'copy' to copy resource configuration across + // different resources of the same type. Example: A resource clone can be + // done via action = 'copy', path = "/", from = "/", + // source_resource = and resource_name = . + // This field is empty for all other values of `action`. + string source_resource = 5; + + // Can be set with action 'copy' or 'move' to indicate the source field within + // resource or source_resource, ignored if provided for other operation types. 
+ string source_path = 6; + + // Value for the `path` field. Set if action is 'add'/'replace'/'test'. + google.protobuf.Value value = 7; + + // Set of filters to apply if `path` refers to array elements or nested array + // elements in order to narrow down to a single unique element that is being + // tested/modified. + // Note that this is intended to be an exact match per filter. + // Example: { + // "/versions/*/name" : "it-123" + // "/versions/*/targetSize/percent": 20 + // } + // Example: { + // "/bindings/*/role": "roles/admin" + // "/bindings/*/condition" : null + // } + // Example: { + // "/bindings/*/role": "roles/admin" + // "/bindings/*/members/*" : ["x@google.com", "y@google.com"] + // } + map path_filters = 8; +} + +// Contains metadata about how much money a recommendation can save or incur. +message CostProjection { + // An approximate projection on amount saved or amount incurred. Negative cost + // units indicate cost savings and positive cost units indicate increase. + // See google.type.Money documentation for positive/negative units. + google.type.Money cost = 1; + + // Duration for which this cost applies. + google.protobuf.Duration duration = 2; +} + +// Contains the impact a recommendation can have for a given category. +message Impact { + // The category of the impact. + enum Category { + // Default unspecified category. Don't use directly. + CATEGORY_UNSPECIFIED = 0; + + // Indicates a potential increase or decrease in cost. + COST = 1; + + // Indicates a potential increase or decrease in security. + SECURITY = 2; + + // Indicates a potential increase or decrease in performance. + PERFORMANCE = 3; + } + + // Category that is being targeted. + Category category = 1; + + // Contains projections (if any) for this category. + oneof projection { + // Use with CategoryType.COST + CostProjection cost_projection = 100; + } +} + +// Information for state. Contains state and metadata. 
+message RecommendationStateInfo { + // Represents Recommendation State + enum State { + // Default state. Don't use directly. + STATE_UNSPECIFIED = 0; + + // Recommendation is active and can be applied. Recommendations content can + // be updated by Google. + // + // ACTIVE recommendations can be marked as CLAIMED, SUCCEEDED, or FAILED. + ACTIVE = 1; + + // Recommendation is in claimed state. Recommendations content is + // immutable and cannot be updated by Google. + // + // CLAIMED recommendations can be marked as CLAIMED, SUCCEEDED, or FAILED. + CLAIMED = 6; + + // Recommendation is in succeeded state. Recommendations content is + // immutable and cannot be updated by Google. + // + // SUCCEEDED recommendations can be marked as SUCCEEDED, or FAILED. + SUCCEEDED = 3; + + // Recommendation is in failed state. Recommendations content is immutable + // and cannot be updated by Google. + // + // FAILED recommendations can be marked as SUCCEEDED, or FAILED. + FAILED = 4; + + // Recommendation is in dismissed state. Recommendation content can be + // updated by Google. + // + // DISMISSED recommendations can be marked as ACTIVE. + DISMISSED = 5; + } + + // The state of the recommendation, Eg ACTIVE, SUCCEEDED, FAILED. + State state = 1; + + // A map of metadata for the state, provided by user or automations systems. + map state_metadata = 2; +} diff --git a/recommender/google/cloud/recommender_v1beta1/proto/recommendation_pb2.py b/recommender/google/cloud/recommender_v1beta1/proto/recommendation_pb2.py new file mode 100644 index 000000000000..e7d5913a69a3 --- /dev/null +++ b/recommender/google/cloud/recommender_v1beta1/proto/recommendation_pb2.py @@ -0,0 +1,1119 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/cloud/recommender_v1beta1/proto/recommendation.proto + +import sys + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 +from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from google.type import money_pb2 as google_dot_type_dot_money__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name="google/cloud/recommender_v1beta1/proto/recommendation.proto", + package="google.cloud.recommender.v1beta1", + syntax="proto3", + serialized_options=_b( + "\n$com.google.cloud.recommender.v1beta1P\001ZKgoogle.golang.org/genproto/googleapis/cloud/recommender/v1beta1;recommender\242\002\004CREC\252\002 Google.Cloud.Recommender.V1Beta1" + ), + serialized_pb=_b( + '\n;google/cloud/recommender_v1beta1/proto/recommendation.proto\x12 google.cloud.recommender.v1beta1\x1a\x1egoogle/protobuf/duration.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/type/money.proto"\xb5\x03\n\x0eRecommendation\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x1b\n\x13recommender_subtype\x18\x0c \x01(\t\x12\x35\n\x11last_refresh_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12@\n\x0eprimary_impact\x18\x05 \x01(\x0b\x32(.google.cloud.recommender.v1beta1.Impact\x12\x43\n\x11\x61\x64\x64itional_impact\x18\x06 \x03(\x0b\x32(.google.cloud.recommender.v1beta1.Impact\x12H\n\x07\x63ontent\x18\x07 
\x01(\x0b\x32\x37.google.cloud.recommender.v1beta1.RecommendationContent\x12M\n\nstate_info\x18\n \x01(\x0b\x32\x39.google.cloud.recommender.v1beta1.RecommendationStateInfo\x12\x0c\n\x04\x65tag\x18\x0b \x01(\t"c\n\x15RecommendationContent\x12J\n\x10operation_groups\x18\x02 \x03(\x0b\x32\x30.google.cloud.recommender.v1beta1.OperationGroup"Q\n\x0eOperationGroup\x12?\n\noperations\x18\x01 \x03(\x0b\x32+.google.cloud.recommender.v1beta1.Operation"\xc7\x02\n\tOperation\x12\x0e\n\x06\x61\x63tion\x18\x01 \x01(\t\x12\x15\n\rresource_type\x18\x02 \x01(\t\x12\x10\n\x08resource\x18\x03 \x01(\t\x12\x0c\n\x04path\x18\x04 \x01(\t\x12\x17\n\x0fsource_resource\x18\x05 \x01(\t\x12\x13\n\x0bsource_path\x18\x06 \x01(\t\x12%\n\x05value\x18\x07 \x01(\x0b\x32\x16.google.protobuf.Value\x12R\n\x0cpath_filters\x18\x08 \x03(\x0b\x32<.google.cloud.recommender.v1beta1.Operation.PathFiltersEntry\x1aJ\n\x10PathFiltersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12%\n\x05value\x18\x02 \x01(\x0b\x32\x16.google.protobuf.Value:\x02\x38\x01"_\n\x0e\x43ostProjection\x12 \n\x04\x63ost\x18\x01 \x01(\x0b\x32\x12.google.type.Money\x12+\n\x08\x64uration\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration"\xf7\x01\n\x06Impact\x12\x43\n\x08\x63\x61tegory\x18\x01 \x01(\x0e\x32\x31.google.cloud.recommender.v1beta1.Impact.Category\x12K\n\x0f\x63ost_projection\x18\x64 \x01(\x0b\x32\x30.google.cloud.recommender.v1beta1.CostProjectionH\x00"M\n\x08\x43\x61tegory\x12\x18\n\x14\x43\x41TEGORY_UNSPECIFIED\x10\x00\x12\x08\n\x04\x43OST\x10\x01\x12\x0c\n\x08SECURITY\x10\x02\x12\x0f\n\x0bPERFORMANCE\x10\x03\x42\x0c\n\nprojection"\xe8\x02\n\x17RecommendationStateInfo\x12N\n\x05state\x18\x01 \x01(\x0e\x32?.google.cloud.recommender.v1beta1.RecommendationStateInfo.State\x12\x64\n\x0estate_metadata\x18\x02 \x03(\x0b\x32L.google.cloud.recommender.v1beta1.RecommendationStateInfo.StateMetadataEntry\x1a\x34\n\x12StateMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01"a\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\n\n\x06\x41\x43TIVE\x10\x01\x12\x0b\n\x07\x43LAIMED\x10\x06\x12\r\n\tSUCCEEDED\x10\x03\x12\n\n\x06\x46\x41ILED\x10\x04\x12\r\n\tDISMISSED\x10\x05\x42\x9f\x01\n$com.google.cloud.recommender.v1beta1P\x01ZKgoogle.golang.org/genproto/googleapis/cloud/recommender/v1beta1;recommender\xa2\x02\x04\x43REC\xaa\x02 Google.Cloud.Recommender.V1Beta1b\x06proto3' + ), + dependencies=[ + google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, + google_dot_protobuf_dot_struct__pb2.DESCRIPTOR, + google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, + google_dot_type_dot_money__pb2.DESCRIPTOR, + ], +) + + +_IMPACT_CATEGORY = _descriptor.EnumDescriptor( + name="Category", + full_name="google.cloud.recommender.v1beta1.Impact.Category", + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name="CATEGORY_UNSPECIFIED", + index=0, + number=0, + serialized_options=None, + type=None, + ), + _descriptor.EnumValueDescriptor( + name="COST", index=1, number=1, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="SECURITY", index=2, number=2, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="PERFORMANCE", index=3, number=3, serialized_options=None, type=None + ), + ], + containing_type=None, + serialized_options=None, + serialized_start=1425, + serialized_end=1502, +) +_sym_db.RegisterEnumDescriptor(_IMPACT_CATEGORY) + +_RECOMMENDATIONSTATEINFO_STATE = _descriptor.EnumDescriptor( + name="State", + full_name="google.cloud.recommender.v1beta1.RecommendationStateInfo.State", + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name="STATE_UNSPECIFIED", + index=0, + number=0, + serialized_options=None, + type=None, + ), + _descriptor.EnumValueDescriptor( + name="ACTIVE", index=1, number=1, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="CLAIMED", index=2, number=6, 
serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="SUCCEEDED", index=3, number=3, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="FAILED", index=4, number=4, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="DISMISSED", index=5, number=5, serialized_options=None, type=None + ), + ], + containing_type=None, + serialized_options=None, + serialized_start=1782, + serialized_end=1879, +) +_sym_db.RegisterEnumDescriptor(_RECOMMENDATIONSTATEINFO_STATE) + + +_RECOMMENDATION = _descriptor.Descriptor( + name="Recommendation", + full_name="google.cloud.recommender.v1beta1.Recommendation", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.cloud.recommender.v1beta1.Recommendation.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="description", + full_name="google.cloud.recommender.v1beta1.Recommendation.description", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="recommender_subtype", + full_name="google.cloud.recommender.v1beta1.Recommendation.recommender_subtype", + index=2, + number=12, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + 
_descriptor.FieldDescriptor( + name="last_refresh_time", + full_name="google.cloud.recommender.v1beta1.Recommendation.last_refresh_time", + index=3, + number=4, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="primary_impact", + full_name="google.cloud.recommender.v1beta1.Recommendation.primary_impact", + index=4, + number=5, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="additional_impact", + full_name="google.cloud.recommender.v1beta1.Recommendation.additional_impact", + index=5, + number=6, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="content", + full_name="google.cloud.recommender.v1beta1.Recommendation.content", + index=6, + number=7, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="state_info", + full_name="google.cloud.recommender.v1beta1.Recommendation.state_info", + index=7, + number=10, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + 
), + _descriptor.FieldDescriptor( + name="etag", + full_name="google.cloud.recommender.v1beta1.Recommendation.etag", + index=8, + number=11, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=218, + serialized_end=655, +) + + +_RECOMMENDATIONCONTENT = _descriptor.Descriptor( + name="RecommendationContent", + full_name="google.cloud.recommender.v1beta1.RecommendationContent", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="operation_groups", + full_name="google.cloud.recommender.v1beta1.RecommendationContent.operation_groups", + index=0, + number=2, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=657, + serialized_end=756, +) + + +_OPERATIONGROUP = _descriptor.Descriptor( + name="OperationGroup", + full_name="google.cloud.recommender.v1beta1.OperationGroup", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="operations", + full_name="google.cloud.recommender.v1beta1.OperationGroup.operations", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + 
extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=758, + serialized_end=839, +) + + +_OPERATION_PATHFILTERSENTRY = _descriptor.Descriptor( + name="PathFiltersEntry", + full_name="google.cloud.recommender.v1beta1.Operation.PathFiltersEntry", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="google.cloud.recommender.v1beta1.Operation.PathFiltersEntry.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.cloud.recommender.v1beta1.Operation.PathFiltersEntry.value", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=_b("8\001"), + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1095, + serialized_end=1169, +) + +_OPERATION = _descriptor.Descriptor( + name="Operation", + full_name="google.cloud.recommender.v1beta1.Operation", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="action", + full_name="google.cloud.recommender.v1beta1.Operation.action", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + 
enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="resource_type", + full_name="google.cloud.recommender.v1beta1.Operation.resource_type", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="resource", + full_name="google.cloud.recommender.v1beta1.Operation.resource", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="path", + full_name="google.cloud.recommender.v1beta1.Operation.path", + index=3, + number=4, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="source_resource", + full_name="google.cloud.recommender.v1beta1.Operation.source_resource", + index=4, + number=5, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="source_path", + full_name="google.cloud.recommender.v1beta1.Operation.source_path", + index=5, + number=6, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + 
default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.cloud.recommender.v1beta1.Operation.value", + index=6, + number=7, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="path_filters", + full_name="google.cloud.recommender.v1beta1.Operation.path_filters", + index=7, + number=8, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[_OPERATION_PATHFILTERSENTRY], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=842, + serialized_end=1169, +) + + +_COSTPROJECTION = _descriptor.Descriptor( + name="CostProjection", + full_name="google.cloud.recommender.v1beta1.CostProjection", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="cost", + full_name="google.cloud.recommender.v1beta1.CostProjection.cost", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="duration", + full_name="google.cloud.recommender.v1beta1.CostProjection.duration", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + 
has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1171, + serialized_end=1266, +) + + +_IMPACT = _descriptor.Descriptor( + name="Impact", + full_name="google.cloud.recommender.v1beta1.Impact", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="category", + full_name="google.cloud.recommender.v1beta1.Impact.category", + index=0, + number=1, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="cost_projection", + full_name="google.cloud.recommender.v1beta1.Impact.cost_projection", + index=1, + number=100, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[_IMPACT_CATEGORY], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="projection", + full_name="google.cloud.recommender.v1beta1.Impact.projection", + index=0, + containing_type=None, + fields=[], + ) + ], + serialized_start=1269, + serialized_end=1516, +) + + +_RECOMMENDATIONSTATEINFO_STATEMETADATAENTRY = _descriptor.Descriptor( + name="StateMetadataEntry", + full_name="google.cloud.recommender.v1beta1.RecommendationStateInfo.StateMetadataEntry", + filename=None, + 
file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="google.cloud.recommender.v1beta1.RecommendationStateInfo.StateMetadataEntry.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.cloud.recommender.v1beta1.RecommendationStateInfo.StateMetadataEntry.value", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=_b("8\001"), + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1728, + serialized_end=1780, +) + +_RECOMMENDATIONSTATEINFO = _descriptor.Descriptor( + name="RecommendationStateInfo", + full_name="google.cloud.recommender.v1beta1.RecommendationStateInfo", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="state", + full_name="google.cloud.recommender.v1beta1.RecommendationStateInfo.state", + index=0, + number=1, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="state_metadata", + full_name="google.cloud.recommender.v1beta1.RecommendationStateInfo.state_metadata", + index=1, + number=2, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + 
message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[_RECOMMENDATIONSTATEINFO_STATEMETADATAENTRY], + enum_types=[_RECOMMENDATIONSTATEINFO_STATE], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1519, + serialized_end=1879, +) + +_RECOMMENDATION.fields_by_name[ + "last_refresh_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_RECOMMENDATION.fields_by_name["primary_impact"].message_type = _IMPACT +_RECOMMENDATION.fields_by_name["additional_impact"].message_type = _IMPACT +_RECOMMENDATION.fields_by_name["content"].message_type = _RECOMMENDATIONCONTENT +_RECOMMENDATION.fields_by_name["state_info"].message_type = _RECOMMENDATIONSTATEINFO +_RECOMMENDATIONCONTENT.fields_by_name["operation_groups"].message_type = _OPERATIONGROUP +_OPERATIONGROUP.fields_by_name["operations"].message_type = _OPERATION +_OPERATION_PATHFILTERSENTRY.fields_by_name[ + "value" +].message_type = google_dot_protobuf_dot_struct__pb2._VALUE +_OPERATION_PATHFILTERSENTRY.containing_type = _OPERATION +_OPERATION.fields_by_name[ + "value" +].message_type = google_dot_protobuf_dot_struct__pb2._VALUE +_OPERATION.fields_by_name["path_filters"].message_type = _OPERATION_PATHFILTERSENTRY +_COSTPROJECTION.fields_by_name[ + "cost" +].message_type = google_dot_type_dot_money__pb2._MONEY +_COSTPROJECTION.fields_by_name[ + "duration" +].message_type = google_dot_protobuf_dot_duration__pb2._DURATION +_IMPACT.fields_by_name["category"].enum_type = _IMPACT_CATEGORY +_IMPACT.fields_by_name["cost_projection"].message_type = _COSTPROJECTION +_IMPACT_CATEGORY.containing_type = _IMPACT +_IMPACT.oneofs_by_name["projection"].fields.append( + _IMPACT.fields_by_name["cost_projection"] +) +_IMPACT.fields_by_name["cost_projection"].containing_oneof = _IMPACT.oneofs_by_name[ + 
"projection" +] +_RECOMMENDATIONSTATEINFO_STATEMETADATAENTRY.containing_type = _RECOMMENDATIONSTATEINFO +_RECOMMENDATIONSTATEINFO.fields_by_name[ + "state" +].enum_type = _RECOMMENDATIONSTATEINFO_STATE +_RECOMMENDATIONSTATEINFO.fields_by_name[ + "state_metadata" +].message_type = _RECOMMENDATIONSTATEINFO_STATEMETADATAENTRY +_RECOMMENDATIONSTATEINFO_STATE.containing_type = _RECOMMENDATIONSTATEINFO +DESCRIPTOR.message_types_by_name["Recommendation"] = _RECOMMENDATION +DESCRIPTOR.message_types_by_name["RecommendationContent"] = _RECOMMENDATIONCONTENT +DESCRIPTOR.message_types_by_name["OperationGroup"] = _OPERATIONGROUP +DESCRIPTOR.message_types_by_name["Operation"] = _OPERATION +DESCRIPTOR.message_types_by_name["CostProjection"] = _COSTPROJECTION +DESCRIPTOR.message_types_by_name["Impact"] = _IMPACT +DESCRIPTOR.message_types_by_name["RecommendationStateInfo"] = _RECOMMENDATIONSTATEINFO +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +Recommendation = _reflection.GeneratedProtocolMessageType( + "Recommendation", + (_message.Message,), + dict( + DESCRIPTOR=_RECOMMENDATION, + __module__="google.cloud.recommender_v1beta1.proto.recommendation_pb2", + __doc__="""A recommendation along with a suggested action. E.g., a rightsizing + recommendation for an underutilized VM, IAM role recommendations, etc + + + Attributes: + name: + Name of recommendation. A project recommendation is + represented as projects/[PROJECT\_NUMBER]/locations/[LOCATION] + /recommenders/[RECOMMENDER\_ID]/recommendations/[RECOMMENDATIO + N\_ID] + description: + Free-form human readable summary in English. The maximum + length is 500 characters. + recommender_subtype: + Contains an identifier for a subtype of recommendations + produced for the same recommender. Subtype is a function of + content and impact, meaning a new subtype will be added when + either content or primary impact category changes. 
Examples: + For recommender = "google.iam.policy.RoleRecommender", + recommender\_subtype can be one of + "REMOVE\_ROLE"/"REPLACE\_ROLE" + last_refresh_time: + Last time this recommendation was refreshed by the system that + created it in the first place. + primary_impact: + The primary impact that this recommendation can have while + trying to optimize for one category. + additional_impact: + Optional set of additional impact that this recommendation may + have when trying to optimize for the primary category. These + may be positive or negative. + content: + Content of the recommendation describing recommended changes + to resources. + state_info: + Information for state. Contains state and metadata. + etag: + Fingerprint of the Recommendation. Provides optimistic locking + when updating states. + """, + # @@protoc_insertion_point(class_scope:google.cloud.recommender.v1beta1.Recommendation) + ), +) +_sym_db.RegisterMessage(Recommendation) + +RecommendationContent = _reflection.GeneratedProtocolMessageType( + "RecommendationContent", + (_message.Message,), + dict( + DESCRIPTOR=_RECOMMENDATIONCONTENT, + __module__="google.cloud.recommender_v1beta1.proto.recommendation_pb2", + __doc__="""Contains what resources are changing and how they are changing. + + + Attributes: + operation_groups: + Operations to one or more Google Cloud resources grouped in + such a way that, all operations within one group are expected + to be performed atomically and in an order. + """, + # @@protoc_insertion_point(class_scope:google.cloud.recommender.v1beta1.RecommendationContent) + ), +) +_sym_db.RegisterMessage(RecommendationContent) + +OperationGroup = _reflection.GeneratedProtocolMessageType( + "OperationGroup", + (_message.Message,), + dict( + DESCRIPTOR=_OPERATIONGROUP, + __module__="google.cloud.recommender_v1beta1.proto.recommendation_pb2", + __doc__="""Group of operations that need to be performed atomically. 
+ + + Attributes: + operations: + List of operations across one or more resources that belong to + this group. Loosely based on RFC6902 and should be performed + in the order they appear. + """, + # @@protoc_insertion_point(class_scope:google.cloud.recommender.v1beta1.OperationGroup) + ), +) +_sym_db.RegisterMessage(OperationGroup) + +Operation = _reflection.GeneratedProtocolMessageType( + "Operation", + (_message.Message,), + dict( + PathFiltersEntry=_reflection.GeneratedProtocolMessageType( + "PathFiltersEntry", + (_message.Message,), + dict( + DESCRIPTOR=_OPERATION_PATHFILTERSENTRY, + __module__="google.cloud.recommender_v1beta1.proto.recommendation_pb2" + # @@protoc_insertion_point(class_scope:google.cloud.recommender.v1beta1.Operation.PathFiltersEntry) + ), + ), + DESCRIPTOR=_OPERATION, + __module__="google.cloud.recommender_v1beta1.proto.recommendation_pb2", + __doc__="""Contains an operation for a resource inspired by the JSON-PATCH format + with support for: \* Custom filters for describing partial array patch. + \* Extended path values for describing nested arrays. \* Custom fields + for describing the resource for which the operation is being described. + \* Allows extension to custom operations not natively supported by + RFC6902. See https://tools.ietf.org/html/rfc6902 for details on the + original RFC. + + + Attributes: + action: + Type of this operation. Contains one of 'and', 'remove', + 'replace', 'move', 'copy', 'test' and custom operations. This + field is case-insensitive and always populated. + resource_type: + Type of GCP resource being modified/tested. This field is + always populated. Example: + cloudresourcemanager.googleapis.com/Project, + compute.googleapis.com/Instance + resource: + Contains the fully qualified resource name. This field is + always populated. ex: + //cloudresourcemanager.googleapis.com/projects/foo. + path: + Path to the target field being operated on. If the operation + is at the resource level, then path should be "/". 
This field + is always populated. + source_resource: + Can be set with action 'copy' to copy resource configuration + across different resources of the same type. Example: A + resource clone can be done via action = 'copy', path = "/", + from = "/", source\_resource = and resource\_name = . This + field is empty for all other values of ``action``. + source_path: + Can be set with action 'copy' or 'move' to indicate the source + field within resource or source\_resource, ignored if provided + for other operation types. + value: + Value for the ``path`` field. Set if action is + 'add'/'replace'/'test'. + path_filters: + Set of filters to apply if ``path`` refers to array elements + or nested array elements in order to narrow down to a single + unique element that is being tested/modified. Note that this + is intended to be an exact match per filter. Example: { + "/versions/*/name" : "it-123" + "/versions/*/targetSize/percent": 20 } Example: { + "/bindings/*/role": "roles/admin" "/bindings/*/condition" : + null } Example: { "/bindings/*/role": "roles/admin" + "/bindings/*/members/\*" : ["x@google.com", "y@google.com"] } + """, + # @@protoc_insertion_point(class_scope:google.cloud.recommender.v1beta1.Operation) + ), +) +_sym_db.RegisterMessage(Operation) +_sym_db.RegisterMessage(Operation.PathFiltersEntry) + +CostProjection = _reflection.GeneratedProtocolMessageType( + "CostProjection", + (_message.Message,), + dict( + DESCRIPTOR=_COSTPROJECTION, + __module__="google.cloud.recommender_v1beta1.proto.recommendation_pb2", + __doc__="""Contains metadata about how much money a recommendation can save or + incur. + + + Attributes: + cost: + An approximate projection on amount saved or amount incurred. + Negative cost units indicate cost savings and positive cost + units indicate increase. See google.type.Money documentation + for positive/negative units. + duration: + Duration for which this cost applies. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.recommender.v1beta1.CostProjection) + ), +) +_sym_db.RegisterMessage(CostProjection) + +Impact = _reflection.GeneratedProtocolMessageType( + "Impact", + (_message.Message,), + dict( + DESCRIPTOR=_IMPACT, + __module__="google.cloud.recommender_v1beta1.proto.recommendation_pb2", + __doc__="""Contains the impact a recommendation can have for a given category. + + + Attributes: + category: + Category that is being targeted. + projection: + Contains projections (if any) for this category. + cost_projection: + Use with CategoryType.COST + """, + # @@protoc_insertion_point(class_scope:google.cloud.recommender.v1beta1.Impact) + ), +) +_sym_db.RegisterMessage(Impact) + +RecommendationStateInfo = _reflection.GeneratedProtocolMessageType( + "RecommendationStateInfo", + (_message.Message,), + dict( + StateMetadataEntry=_reflection.GeneratedProtocolMessageType( + "StateMetadataEntry", + (_message.Message,), + dict( + DESCRIPTOR=_RECOMMENDATIONSTATEINFO_STATEMETADATAENTRY, + __module__="google.cloud.recommender_v1beta1.proto.recommendation_pb2" + # @@protoc_insertion_point(class_scope:google.cloud.recommender.v1beta1.RecommendationStateInfo.StateMetadataEntry) + ), + ), + DESCRIPTOR=_RECOMMENDATIONSTATEINFO, + __module__="google.cloud.recommender_v1beta1.proto.recommendation_pb2", + __doc__="""Information for state. Contains state and metadata. + + + Attributes: + state: + The state of the recommendation, Eg ACTIVE, SUCCEEDED, FAILED. + state_metadata: + A map of metadata for the state, provided by user or + automations systems. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.recommender.v1beta1.RecommendationStateInfo) + ), +) +_sym_db.RegisterMessage(RecommendationStateInfo) +_sym_db.RegisterMessage(RecommendationStateInfo.StateMetadataEntry) + + +DESCRIPTOR._options = None +_OPERATION_PATHFILTERSENTRY._options = None +_RECOMMENDATIONSTATEINFO_STATEMETADATAENTRY._options = None +# @@protoc_insertion_point(module_scope) diff --git a/recommender/google/cloud/recommender_v1beta1/proto/recommendation_pb2_grpc.py b/recommender/google/cloud/recommender_v1beta1/proto/recommendation_pb2_grpc.py new file mode 100644 index 000000000000..07cb78fe03a9 --- /dev/null +++ b/recommender/google/cloud/recommender_v1beta1/proto/recommendation_pb2_grpc.py @@ -0,0 +1,2 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +import grpc diff --git a/recommender/google/cloud/recommender_v1beta1/proto/recommender_service.proto b/recommender/google/cloud/recommender_v1beta1/proto/recommender_service.proto new file mode 100644 index 000000000000..49a076ae74f6 --- /dev/null +++ b/recommender/google/cloud/recommender_v1beta1/proto/recommender_service.proto @@ -0,0 +1,190 @@ +// Copyright 2019 Google LLC. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + +syntax = "proto3"; + +package google.cloud.recommender.v1beta1; + +import "google/api/annotations.proto"; +import "google/cloud/recommender/v1beta1/recommendation.proto"; +import "google/longrunning/operations.proto"; +import "google/api/client.proto"; + +option csharp_namespace = "Google.Cloud.Recommmender.V1Beta1"; +option go_package = "google.golang.org/genproto/googleapis/cloud/recommender/v1beta1;recommender"; +option java_multiple_files = true; +option java_outer_classname = "RecommenderProto"; +option java_package = "com.google.cloud.recommender.v1beta1"; +option objc_class_prefix = "CREC"; + +// Provides recommendations for cloud customers for various categories like +// performance optimization, cost savings, reliability, feature discovery, etc. +// These recommendations are generated automatically based on analysis of user +// resources, configuration and monitoring metrics. +service Recommender { + option (google.api.default_host) = "recommender.googleapis.com"; + option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; + + // Lists recommendations for a Cloud project. Requires the recommender.*.list + // IAM permission for the specified recommender. + rpc ListRecommendations(ListRecommendationsRequest) returns (ListRecommendationsResponse) { + option (google.api.http) = { + get: "/v1beta1/{parent=projects/*/locations/*/recommenders/*}/recommendations" + }; + } + + // Gets the requested recommendation. Requires the recommender.*.get + // IAM permission for the specified recommender. + rpc GetRecommendation(GetRecommendationRequest) returns (Recommendation) { + option (google.api.http) = { + get: "/v1beta1/{name=projects/*/locations/*/recommenders/*/recommendations/*}" + }; + } + + // Mark the Recommendation State as Claimed. Users can use this method to + // indicate to the Recommender API that they are starting to apply the + // recommendation themselves. This stops the recommendation content from being + // updated. 
+ // + // MarkRecommendationClaimed can be applied to recommendations in CLAIMED, + // SUCCEEDED, FAILED, or ACTIVE state. + // + // Requires the recommender.*.update IAM permission for the specified + // recommender. + rpc MarkRecommendationClaimed(MarkRecommendationClaimedRequest) returns (Recommendation) { + option (google.api.http) = { + post: "/v1beta1/{name=projects/*/locations/*/recommenders/*/recommendations/*}:markClaimed" + body: "*" + }; + } + + // Mark the Recommendation State as Succeeded. Users can use this method to + // indicate to the Recommender API that they have applied the recommendation + // themselves, and the operation was successful. This stops the recommendation + // content from being updated. + // + // MarkRecommendationSucceeded can be applied to recommendations in ACTIVE, + // CLAIMED, SUCCEEDED, or FAILED state. + // + // Requires the recommender.*.update IAM permission for the specified + // recommender. + rpc MarkRecommendationSucceeded(MarkRecommendationSucceededRequest) returns (Recommendation) { + option (google.api.http) = { + post: "/v1beta1/{name=projects/*/locations/*/recommenders/*/recommendations/*}:markSucceeded" + body: "*" + }; + } + + // Mark the Recommendation State as Failed. Users can use this method to + // indicate to the Recommender API that they have applied the recommendation + // themselves, and the operation failed. This stops the recommendation content + // from being updated. + // + // MarkRecommendationFailed can be applied to recommendations in ACTIVE, + // CLAIMED, SUCCEEDED, or FAILED state. + // + // Requires the recommender.*.update IAM permission for the specified + // recommender. + rpc MarkRecommendationFailed(MarkRecommendationFailedRequest) returns (Recommendation) { + option (google.api.http) = { + post: "/v1beta1/{name=projects/*/locations/*/recommenders/*/recommendations/*}:markFailed" + body: "*" + }; + } +} + +// Request for the `ListRecommendations` method. 
+message ListRecommendationsRequest { + // Required. The container resource on which to execute the request. + // Acceptable formats: + // + // 1. + // "projects/[PROJECT_NUMBER]/locations/[LOCATION]/recommenders/[RECOMMENDER_ID]", + // + // LOCATION here refers to GCP Locations: + // https://cloud.google.com/about/locations/ + string parent = 1; + + // Optional. The maximum number of results to return from this request. + // Non-positive values are ignored. If not specified, the server will + // determine the number of results to return. + int32 page_size = 2; + + // Optional. If present, retrieves the next batch of results from the + // preceding call to this method. `page_token` must be the value of + // `next_page_token` from the previous response. The values of other method + // parameters must be identical to those in the previous call. + string page_token = 3; + + // Filter expression to restrict the recommendations returned. Supported + // filter fields: state_info.state + // Eg: `state_info.state:"DISMISSED" or state_info.state:"FAILED" + string filter = 5; +} + +// Response to the `ListRecommendations` method. +message ListRecommendationsResponse { + // The set of recommendations for the `parent` resource. + repeated Recommendation recommendations = 1; + + // A token that can be used to request the next page of results. This field is + // empty if there are no additional results. + string next_page_token = 2; +} + +// Request to the `GetRecommendation` method. +message GetRecommendationRequest { + // Name of the recommendation. + string name = 1; +} + +// Request for the `MarkRecommendationClaimed` Method. +message MarkRecommendationClaimedRequest { + // Name of the recommendation. + string name = 1; + + // State properties to include with this state. Overwrites any existing + // `state_metadata`. + map state_metadata = 2; + + // Fingerprint of the Recommendation. Provides optimistic locking. 
+ string etag = 3; +} + +// Request for the `MarkRecommendationSucceeded` Method. +message MarkRecommendationSucceededRequest { + // Name of the recommendation. + string name = 1; + + // State properties to include with this state. Overwrites any existing + // `state_metadata`. + map state_metadata = 2; + + // Fingerprint of the Recommendation. Provides optimistic locking. + string etag = 3; +} + +// Request for the `MarkRecommendationFailed` Method. +message MarkRecommendationFailedRequest { + // Name of the recommendation. + string name = 1; + + // State properties to include with this state. Overwrites any existing + // `state_metadata`. + map state_metadata = 2; + + // Fingerprint of the Recommendation. Provides optimistic locking. + string etag = 3; +} diff --git a/recommender/google/cloud/recommender_v1beta1/proto/recommender_service_pb2.py b/recommender/google/cloud/recommender_v1beta1/proto/recommender_service_pb2.py new file mode 100644 index 000000000000..5e7d8f029fd3 --- /dev/null +++ b/recommender/google/cloud/recommender_v1beta1/proto/recommender_service_pb2.py @@ -0,0 +1,924 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/cloud/recommender_v1beta1/proto/recommender_service.proto + +import sys + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.cloud.recommender_v1beta1.proto import ( + recommendation_pb2 as google_dot_cloud_dot_recommender__v1beta1_dot_proto_dot_recommendation__pb2, +) +from google.longrunning import ( + operations_pb2 as google_dot_longrunning_dot_operations__pb2, +) +from google.api import client_pb2 as google_dot_api_dot_client__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name="google/cloud/recommender_v1beta1/proto/recommender_service.proto", + package="google.cloud.recommender.v1beta1", + syntax="proto3", + serialized_options=_b( + "\n$com.google.cloud.recommender.v1beta1B\020RecommenderProtoP\001ZKgoogle.golang.org/genproto/googleapis/cloud/recommender/v1beta1;recommender\242\002\004CREC\252\002!Google.Cloud.Recommmender.V1Beta1" + ), + serialized_pb=_b( + '\n@google/cloud/recommender_v1beta1/proto/recommender_service.proto\x12 google.cloud.recommender.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a;google/cloud/recommender_v1beta1/proto/recommendation.proto\x1a#google/longrunning/operations.proto\x1a\x17google/api/client.proto"c\n\x1aListRecommendationsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x05 \x01(\t"\x81\x01\n\x1bListRecommendationsResponse\x12I\n\x0frecommendations\x18\x01 \x03(\x0b\x32\x30.google.cloud.recommender.v1beta1.Recommendation\x12\x17\n\x0fnext_page_token\x18\x02 
\x01(\t"(\n\x18GetRecommendationRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"\xe3\x01\n MarkRecommendationClaimedRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12m\n\x0estate_metadata\x18\x02 \x03(\x0b\x32U.google.cloud.recommender.v1beta1.MarkRecommendationClaimedRequest.StateMetadataEntry\x12\x0c\n\x04\x65tag\x18\x03 \x01(\t\x1a\x34\n\x12StateMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xe7\x01\n"MarkRecommendationSucceededRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12o\n\x0estate_metadata\x18\x02 \x03(\x0b\x32W.google.cloud.recommender.v1beta1.MarkRecommendationSucceededRequest.StateMetadataEntry\x12\x0c\n\x04\x65tag\x18\x03 \x01(\t\x1a\x34\n\x12StateMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xe1\x01\n\x1fMarkRecommendationFailedRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12l\n\x0estate_metadata\x18\x02 \x03(\x0b\x32T.google.cloud.recommender.v1beta1.MarkRecommendationFailedRequest.StateMetadataEntry\x12\x0c\n\x04\x65tag\x18\x03 \x01(\t\x1a\x34\n\x12StateMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01\x32\xf7\t\n\x0bRecommender\x12\xe3\x01\n\x13ListRecommendations\x12<.google.cloud.recommender.v1beta1.ListRecommendationsRequest\x1a=.google.cloud.recommender.v1beta1.ListRecommendationsResponse"O\x82\xd3\xe4\x93\x02I\x12G/v1beta1/{parent=projects/*/locations/*/recommenders/*}/recommendations\x12\xd2\x01\n\x11GetRecommendation\x12:.google.cloud.recommender.v1beta1.GetRecommendationRequest\x1a\x30.google.cloud.recommender.v1beta1.Recommendation"O\x82\xd3\xe4\x93\x02I\x12G/v1beta1/{name=projects/*/locations/*/recommenders/*/recommendations/*}\x12\xf1\x01\n\x19MarkRecommendationClaimed\x12\x42.google.cloud.recommender.v1beta1.MarkRecommendationClaimedRequest\x1a\x30.google.cloud.recommender.v1beta1.Recommendation"^\x82\xd3\xe4\x93\x02X"S/v1beta1/{name=projects/*/locations/*/recommenders/*/recommendations/*}:markClaimed:\x01*\x12\xf7\x01\n\x1bMarkRecommendationSucceeded\x12\x44.google.cloud.recommender.v1beta1.MarkRecommendationSucceededRequest\x1a\x30.google.cloud.recommender.v1beta1.Recommendation"`\x82\xd3\xe4\x93\x02Z"U/v1beta1/{name=projects/*/locations/*/recommenders/*/recommendations/*}:markSucceeded:\x01*\x12\xee\x01\n\x18MarkRecommendationFailed\x12\x41.google.cloud.recommender.v1beta1.MarkRecommendationFailedRequest\x1a\x30.google.cloud.recommender.v1beta1.Recommendation"]\x82\xd3\xe4\x93\x02W"R/v1beta1/{name=projects/*/locations/*/recommenders/*/recommendations/*}:markFailed:\x01*\x1aN\xca\x41\x1arecommender.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB\xb2\x01\n$com.google.cloud.recommender.v1beta1B\x10RecommenderProtoP\x01ZKgoogle.golang.org/genproto/googleapis/cloud/recommender/v1beta1;recommender\xa2\x02\x04\x43REC\xaa\x02!Google.Cloud.Recommmender.V1Beta1b\x06proto3' + ), + dependencies=[ + google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_cloud_dot_recommender__v1beta1_dot_proto_dot_recommendation__pb2.DESCRIPTOR, + google_dot_longrunning_dot_operations__pb2.DESCRIPTOR, + 
google_dot_api_dot_client__pb2.DESCRIPTOR, + ], +) + + +_LISTRECOMMENDATIONSREQUEST = _descriptor.Descriptor( + name="ListRecommendationsRequest", + full_name="google.cloud.recommender.v1beta1.ListRecommendationsRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="parent", + full_name="google.cloud.recommender.v1beta1.ListRecommendationsRequest.parent", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="page_size", + full_name="google.cloud.recommender.v1beta1.ListRecommendationsRequest.page_size", + index=1, + number=2, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="page_token", + full_name="google.cloud.recommender.v1beta1.ListRecommendationsRequest.page_token", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="filter", + full_name="google.cloud.recommender.v1beta1.ListRecommendationsRequest.filter", + index=3, + number=5, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + 
enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=255, + serialized_end=354, +) + + +_LISTRECOMMENDATIONSRESPONSE = _descriptor.Descriptor( + name="ListRecommendationsResponse", + full_name="google.cloud.recommender.v1beta1.ListRecommendationsResponse", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="recommendations", + full_name="google.cloud.recommender.v1beta1.ListRecommendationsResponse.recommendations", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="next_page_token", + full_name="google.cloud.recommender.v1beta1.ListRecommendationsResponse.next_page_token", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=357, + serialized_end=486, +) + + +_GETRECOMMENDATIONREQUEST = _descriptor.Descriptor( + name="GetRecommendationRequest", + full_name="google.cloud.recommender.v1beta1.GetRecommendationRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.cloud.recommender.v1beta1.GetRecommendationRequest.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, 
+ containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=488, + serialized_end=528, +) + + +_MARKRECOMMENDATIONCLAIMEDREQUEST_STATEMETADATAENTRY = _descriptor.Descriptor( + name="StateMetadataEntry", + full_name="google.cloud.recommender.v1beta1.MarkRecommendationClaimedRequest.StateMetadataEntry", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="google.cloud.recommender.v1beta1.MarkRecommendationClaimedRequest.StateMetadataEntry.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.cloud.recommender.v1beta1.MarkRecommendationClaimedRequest.StateMetadataEntry.value", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=_b("8\001"), + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=706, + serialized_end=758, +) + +_MARKRECOMMENDATIONCLAIMEDREQUEST = _descriptor.Descriptor( + name="MarkRecommendationClaimedRequest", + full_name="google.cloud.recommender.v1beta1.MarkRecommendationClaimedRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + 
name="name", + full_name="google.cloud.recommender.v1beta1.MarkRecommendationClaimedRequest.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="state_metadata", + full_name="google.cloud.recommender.v1beta1.MarkRecommendationClaimedRequest.state_metadata", + index=1, + number=2, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="etag", + full_name="google.cloud.recommender.v1beta1.MarkRecommendationClaimedRequest.etag", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[_MARKRECOMMENDATIONCLAIMEDREQUEST_STATEMETADATAENTRY], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=531, + serialized_end=758, +) + + +_MARKRECOMMENDATIONSUCCEEDEDREQUEST_STATEMETADATAENTRY = _descriptor.Descriptor( + name="StateMetadataEntry", + full_name="google.cloud.recommender.v1beta1.MarkRecommendationSucceededRequest.StateMetadataEntry", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="google.cloud.recommender.v1beta1.MarkRecommendationSucceededRequest.StateMetadataEntry.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + 
has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.cloud.recommender.v1beta1.MarkRecommendationSucceededRequest.StateMetadataEntry.value", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=_b("8\001"), + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=706, + serialized_end=758, +) + +_MARKRECOMMENDATIONSUCCEEDEDREQUEST = _descriptor.Descriptor( + name="MarkRecommendationSucceededRequest", + full_name="google.cloud.recommender.v1beta1.MarkRecommendationSucceededRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.cloud.recommender.v1beta1.MarkRecommendationSucceededRequest.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="state_metadata", + full_name="google.cloud.recommender.v1beta1.MarkRecommendationSucceededRequest.state_metadata", + index=1, + number=2, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + 
_descriptor.FieldDescriptor( + name="etag", + full_name="google.cloud.recommender.v1beta1.MarkRecommendationSucceededRequest.etag", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[_MARKRECOMMENDATIONSUCCEEDEDREQUEST_STATEMETADATAENTRY], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=761, + serialized_end=992, +) + + +_MARKRECOMMENDATIONFAILEDREQUEST_STATEMETADATAENTRY = _descriptor.Descriptor( + name="StateMetadataEntry", + full_name="google.cloud.recommender.v1beta1.MarkRecommendationFailedRequest.StateMetadataEntry", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="google.cloud.recommender.v1beta1.MarkRecommendationFailedRequest.StateMetadataEntry.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.cloud.recommender.v1beta1.MarkRecommendationFailedRequest.StateMetadataEntry.value", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=_b("8\001"), + is_extendable=False, + syntax="proto3", + extension_ranges=[], + 
oneofs=[], + serialized_start=706, + serialized_end=758, +) + +_MARKRECOMMENDATIONFAILEDREQUEST = _descriptor.Descriptor( + name="MarkRecommendationFailedRequest", + full_name="google.cloud.recommender.v1beta1.MarkRecommendationFailedRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.cloud.recommender.v1beta1.MarkRecommendationFailedRequest.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="state_metadata", + full_name="google.cloud.recommender.v1beta1.MarkRecommendationFailedRequest.state_metadata", + index=1, + number=2, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="etag", + full_name="google.cloud.recommender.v1beta1.MarkRecommendationFailedRequest.etag", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[_MARKRECOMMENDATIONFAILEDREQUEST_STATEMETADATAENTRY], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=995, + serialized_end=1220, +) + +_LISTRECOMMENDATIONSRESPONSE.fields_by_name[ + "recommendations" +].message_type = ( + 
google_dot_cloud_dot_recommender__v1beta1_dot_proto_dot_recommendation__pb2._RECOMMENDATION +) +_MARKRECOMMENDATIONCLAIMEDREQUEST_STATEMETADATAENTRY.containing_type = ( + _MARKRECOMMENDATIONCLAIMEDREQUEST +) +_MARKRECOMMENDATIONCLAIMEDREQUEST.fields_by_name[ + "state_metadata" +].message_type = _MARKRECOMMENDATIONCLAIMEDREQUEST_STATEMETADATAENTRY +_MARKRECOMMENDATIONSUCCEEDEDREQUEST_STATEMETADATAENTRY.containing_type = ( + _MARKRECOMMENDATIONSUCCEEDEDREQUEST +) +_MARKRECOMMENDATIONSUCCEEDEDREQUEST.fields_by_name[ + "state_metadata" +].message_type = _MARKRECOMMENDATIONSUCCEEDEDREQUEST_STATEMETADATAENTRY +_MARKRECOMMENDATIONFAILEDREQUEST_STATEMETADATAENTRY.containing_type = ( + _MARKRECOMMENDATIONFAILEDREQUEST +) +_MARKRECOMMENDATIONFAILEDREQUEST.fields_by_name[ + "state_metadata" +].message_type = _MARKRECOMMENDATIONFAILEDREQUEST_STATEMETADATAENTRY +DESCRIPTOR.message_types_by_name[ + "ListRecommendationsRequest" +] = _LISTRECOMMENDATIONSREQUEST +DESCRIPTOR.message_types_by_name[ + "ListRecommendationsResponse" +] = _LISTRECOMMENDATIONSRESPONSE +DESCRIPTOR.message_types_by_name["GetRecommendationRequest"] = _GETRECOMMENDATIONREQUEST +DESCRIPTOR.message_types_by_name[ + "MarkRecommendationClaimedRequest" +] = _MARKRECOMMENDATIONCLAIMEDREQUEST +DESCRIPTOR.message_types_by_name[ + "MarkRecommendationSucceededRequest" +] = _MARKRECOMMENDATIONSUCCEEDEDREQUEST +DESCRIPTOR.message_types_by_name[ + "MarkRecommendationFailedRequest" +] = _MARKRECOMMENDATIONFAILEDREQUEST +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +ListRecommendationsRequest = _reflection.GeneratedProtocolMessageType( + "ListRecommendationsRequest", + (_message.Message,), + dict( + DESCRIPTOR=_LISTRECOMMENDATIONSREQUEST, + __module__="google.cloud.recommender_v1beta1.proto.recommender_service_pb2", + __doc__="""Request for the ``ListRecommendations`` method. + + + Attributes: + parent: + Required. The container resource on which to execute the + request. Acceptable formats: 1. 
"projects/[PROJECT\_NUMBER]/l + ocations/[LOCATION]/recommenders/[RECOMMENDER\_ID]", LOCATION + here refers to GCP Locations: + https://cloud.google.com/about/locations/ + page_size: + Optional. The maximum number of results to return from this + request. Non-positive values are ignored. If not specified, + the server will determine the number of results to return. + page_token: + Optional. If present, retrieves the next batch of results from + the preceding call to this method. ``page_token`` must be the + value of ``next_page_token`` from the previous response. The + values of other method parameters must be identical to those + in the previous call. + filter: + Filter expression to restrict the recommendations returned. + Supported filter fields: state\_info.state Eg: + \`state\_info.state:"DISMISSED" or state\_info.state:"FAILED" + """, + # @@protoc_insertion_point(class_scope:google.cloud.recommender.v1beta1.ListRecommendationsRequest) + ), +) +_sym_db.RegisterMessage(ListRecommendationsRequest) + +ListRecommendationsResponse = _reflection.GeneratedProtocolMessageType( + "ListRecommendationsResponse", + (_message.Message,), + dict( + DESCRIPTOR=_LISTRECOMMENDATIONSRESPONSE, + __module__="google.cloud.recommender_v1beta1.proto.recommender_service_pb2", + __doc__="""Response to the ``ListRecommendations`` method. + + + Attributes: + recommendations: + The set of recommendations for the ``parent`` resource. + next_page_token: + A token that can be used to request the next page of results. + This field is empty if there are no additional results. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.recommender.v1beta1.ListRecommendationsResponse) + ), +) +_sym_db.RegisterMessage(ListRecommendationsResponse) + +GetRecommendationRequest = _reflection.GeneratedProtocolMessageType( + "GetRecommendationRequest", + (_message.Message,), + dict( + DESCRIPTOR=_GETRECOMMENDATIONREQUEST, + __module__="google.cloud.recommender_v1beta1.proto.recommender_service_pb2", + __doc__="""Request to the ``GetRecommendation`` method. + + + Attributes: + name: + Name of the recommendation. + """, + # @@protoc_insertion_point(class_scope:google.cloud.recommender.v1beta1.GetRecommendationRequest) + ), +) +_sym_db.RegisterMessage(GetRecommendationRequest) + +MarkRecommendationClaimedRequest = _reflection.GeneratedProtocolMessageType( + "MarkRecommendationClaimedRequest", + (_message.Message,), + dict( + StateMetadataEntry=_reflection.GeneratedProtocolMessageType( + "StateMetadataEntry", + (_message.Message,), + dict( + DESCRIPTOR=_MARKRECOMMENDATIONCLAIMEDREQUEST_STATEMETADATAENTRY, + __module__="google.cloud.recommender_v1beta1.proto.recommender_service_pb2" + # @@protoc_insertion_point(class_scope:google.cloud.recommender.v1beta1.MarkRecommendationClaimedRequest.StateMetadataEntry) + ), + ), + DESCRIPTOR=_MARKRECOMMENDATIONCLAIMEDREQUEST, + __module__="google.cloud.recommender_v1beta1.proto.recommender_service_pb2", + __doc__="""Request for the ``MarkRecommendationClaimed`` Method. + + + Attributes: + name: + Name of the recommendation. + state_metadata: + State properties to include with this state. Overwrites any + existing ``state_metadata``. + etag: + Fingerprint of the Recommendation. Provides optimistic + locking. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.recommender.v1beta1.MarkRecommendationClaimedRequest) + ), +) +_sym_db.RegisterMessage(MarkRecommendationClaimedRequest) +_sym_db.RegisterMessage(MarkRecommendationClaimedRequest.StateMetadataEntry) + +MarkRecommendationSucceededRequest = _reflection.GeneratedProtocolMessageType( + "MarkRecommendationSucceededRequest", + (_message.Message,), + dict( + StateMetadataEntry=_reflection.GeneratedProtocolMessageType( + "StateMetadataEntry", + (_message.Message,), + dict( + DESCRIPTOR=_MARKRECOMMENDATIONSUCCEEDEDREQUEST_STATEMETADATAENTRY, + __module__="google.cloud.recommender_v1beta1.proto.recommender_service_pb2" + # @@protoc_insertion_point(class_scope:google.cloud.recommender.v1beta1.MarkRecommendationSucceededRequest.StateMetadataEntry) + ), + ), + DESCRIPTOR=_MARKRECOMMENDATIONSUCCEEDEDREQUEST, + __module__="google.cloud.recommender_v1beta1.proto.recommender_service_pb2", + __doc__="""Request for the ``MarkRecommendationSucceeded`` Method. + + + Attributes: + name: + Name of the recommendation. + state_metadata: + State properties to include with this state. Overwrites any + existing ``state_metadata``. + etag: + Fingerprint of the Recommendation. Provides optimistic + locking. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.recommender.v1beta1.MarkRecommendationSucceededRequest) + ), +) +_sym_db.RegisterMessage(MarkRecommendationSucceededRequest) +_sym_db.RegisterMessage(MarkRecommendationSucceededRequest.StateMetadataEntry) + +MarkRecommendationFailedRequest = _reflection.GeneratedProtocolMessageType( + "MarkRecommendationFailedRequest", + (_message.Message,), + dict( + StateMetadataEntry=_reflection.GeneratedProtocolMessageType( + "StateMetadataEntry", + (_message.Message,), + dict( + DESCRIPTOR=_MARKRECOMMENDATIONFAILEDREQUEST_STATEMETADATAENTRY, + __module__="google.cloud.recommender_v1beta1.proto.recommender_service_pb2" + # @@protoc_insertion_point(class_scope:google.cloud.recommender.v1beta1.MarkRecommendationFailedRequest.StateMetadataEntry) + ), + ), + DESCRIPTOR=_MARKRECOMMENDATIONFAILEDREQUEST, + __module__="google.cloud.recommender_v1beta1.proto.recommender_service_pb2", + __doc__="""Request for the ``MarkRecommendationFailed`` Method. + + + Attributes: + name: + Name of the recommendation. + state_metadata: + State properties to include with this state. Overwrites any + existing ``state_metadata``. + etag: + Fingerprint of the Recommendation. Provides optimistic + locking. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.recommender.v1beta1.MarkRecommendationFailedRequest) + ), +) +_sym_db.RegisterMessage(MarkRecommendationFailedRequest) +_sym_db.RegisterMessage(MarkRecommendationFailedRequest.StateMetadataEntry) + + +DESCRIPTOR._options = None +_MARKRECOMMENDATIONCLAIMEDREQUEST_STATEMETADATAENTRY._options = None +_MARKRECOMMENDATIONSUCCEEDEDREQUEST_STATEMETADATAENTRY._options = None +_MARKRECOMMENDATIONFAILEDREQUEST_STATEMETADATAENTRY._options = None + +_RECOMMENDER = _descriptor.ServiceDescriptor( + name="Recommender", + full_name="google.cloud.recommender.v1beta1.Recommender", + file=DESCRIPTOR, + index=0, + serialized_options=_b( + "\312A\032recommender.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" + ), + serialized_start=1223, + serialized_end=2494, + methods=[ + _descriptor.MethodDescriptor( + name="ListRecommendations", + full_name="google.cloud.recommender.v1beta1.Recommender.ListRecommendations", + index=0, + containing_service=None, + input_type=_LISTRECOMMENDATIONSREQUEST, + output_type=_LISTRECOMMENDATIONSRESPONSE, + serialized_options=_b( + "\202\323\344\223\002I\022G/v1beta1/{parent=projects/*/locations/*/recommenders/*}/recommendations" + ), + ), + _descriptor.MethodDescriptor( + name="GetRecommendation", + full_name="google.cloud.recommender.v1beta1.Recommender.GetRecommendation", + index=1, + containing_service=None, + input_type=_GETRECOMMENDATIONREQUEST, + output_type=google_dot_cloud_dot_recommender__v1beta1_dot_proto_dot_recommendation__pb2._RECOMMENDATION, + serialized_options=_b( + "\202\323\344\223\002I\022G/v1beta1/{name=projects/*/locations/*/recommenders/*/recommendations/*}" + ), + ), + _descriptor.MethodDescriptor( + name="MarkRecommendationClaimed", + full_name="google.cloud.recommender.v1beta1.Recommender.MarkRecommendationClaimed", + index=2, + containing_service=None, + input_type=_MARKRECOMMENDATIONCLAIMEDREQUEST, + 
output_type=google_dot_cloud_dot_recommender__v1beta1_dot_proto_dot_recommendation__pb2._RECOMMENDATION, + serialized_options=_b( + '\202\323\344\223\002X"S/v1beta1/{name=projects/*/locations/*/recommenders/*/recommendations/*}:markClaimed:\001*' + ), + ), + _descriptor.MethodDescriptor( + name="MarkRecommendationSucceeded", + full_name="google.cloud.recommender.v1beta1.Recommender.MarkRecommendationSucceeded", + index=3, + containing_service=None, + input_type=_MARKRECOMMENDATIONSUCCEEDEDREQUEST, + output_type=google_dot_cloud_dot_recommender__v1beta1_dot_proto_dot_recommendation__pb2._RECOMMENDATION, + serialized_options=_b( + '\202\323\344\223\002Z"U/v1beta1/{name=projects/*/locations/*/recommenders/*/recommendations/*}:markSucceeded:\001*' + ), + ), + _descriptor.MethodDescriptor( + name="MarkRecommendationFailed", + full_name="google.cloud.recommender.v1beta1.Recommender.MarkRecommendationFailed", + index=4, + containing_service=None, + input_type=_MARKRECOMMENDATIONFAILEDREQUEST, + output_type=google_dot_cloud_dot_recommender__v1beta1_dot_proto_dot_recommendation__pb2._RECOMMENDATION, + serialized_options=_b( + '\202\323\344\223\002W"R/v1beta1/{name=projects/*/locations/*/recommenders/*/recommendations/*}:markFailed:\001*' + ), + ), + ], +) +_sym_db.RegisterServiceDescriptor(_RECOMMENDER) + +DESCRIPTOR.services_by_name["Recommender"] = _RECOMMENDER + +# @@protoc_insertion_point(module_scope) diff --git a/recommender/google/cloud/recommender_v1beta1/proto/recommender_service_pb2_grpc.py b/recommender/google/cloud/recommender_v1beta1/proto/recommender_service_pb2_grpc.py new file mode 100644 index 000000000000..24ffe1eb40fb --- /dev/null +++ b/recommender/google/cloud/recommender_v1beta1/proto/recommender_service_pb2_grpc.py @@ -0,0 +1,155 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
+import grpc + +from google.cloud.recommender_v1beta1.proto import ( + recommendation_pb2 as google_dot_cloud_dot_recommender__v1beta1_dot_proto_dot_recommendation__pb2, +) +from google.cloud.recommender_v1beta1.proto import ( + recommender_service_pb2 as google_dot_cloud_dot_recommender__v1beta1_dot_proto_dot_recommender__service__pb2, +) + + +class RecommenderStub(object): + """Provides recommendations for cloud customers for various categories like + performance optimization, cost savings, reliability, feature discovery, etc. + These recommendations are generated automatically based on analysis of user + resources, configuration and monitoring metrics. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.ListRecommendations = channel.unary_unary( + "/google.cloud.recommender.v1beta1.Recommender/ListRecommendations", + request_serializer=google_dot_cloud_dot_recommender__v1beta1_dot_proto_dot_recommender__service__pb2.ListRecommendationsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_recommender__v1beta1_dot_proto_dot_recommender__service__pb2.ListRecommendationsResponse.FromString, + ) + self.GetRecommendation = channel.unary_unary( + "/google.cloud.recommender.v1beta1.Recommender/GetRecommendation", + request_serializer=google_dot_cloud_dot_recommender__v1beta1_dot_proto_dot_recommender__service__pb2.GetRecommendationRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_recommender__v1beta1_dot_proto_dot_recommendation__pb2.Recommendation.FromString, + ) + self.MarkRecommendationClaimed = channel.unary_unary( + "/google.cloud.recommender.v1beta1.Recommender/MarkRecommendationClaimed", + request_serializer=google_dot_cloud_dot_recommender__v1beta1_dot_proto_dot_recommender__service__pb2.MarkRecommendationClaimedRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_recommender__v1beta1_dot_proto_dot_recommendation__pb2.Recommendation.FromString, + ) + 
self.MarkRecommendationSucceeded = channel.unary_unary( + "/google.cloud.recommender.v1beta1.Recommender/MarkRecommendationSucceeded", + request_serializer=google_dot_cloud_dot_recommender__v1beta1_dot_proto_dot_recommender__service__pb2.MarkRecommendationSucceededRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_recommender__v1beta1_dot_proto_dot_recommendation__pb2.Recommendation.FromString, + ) + self.MarkRecommendationFailed = channel.unary_unary( + "/google.cloud.recommender.v1beta1.Recommender/MarkRecommendationFailed", + request_serializer=google_dot_cloud_dot_recommender__v1beta1_dot_proto_dot_recommender__service__pb2.MarkRecommendationFailedRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_recommender__v1beta1_dot_proto_dot_recommendation__pb2.Recommendation.FromString, + ) + + +class RecommenderServicer(object): + """Provides recommendations for cloud customers for various categories like + performance optimization, cost savings, reliability, feature discovery, etc. + These recommendations are generated automatically based on analysis of user + resources, configuration and monitoring metrics. + """ + + def ListRecommendations(self, request, context): + """Lists recommendations for a Cloud project. Requires the recommender.*.list + IAM permission for the specified recommender. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def GetRecommendation(self, request, context): + """Gets the requested recommendation. Requires the recommender.*.get + IAM permission for the specified recommender. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def MarkRecommendationClaimed(self, request, context): + """Mark the Recommendation State as Claimed. 
Users can use this method to + indicate to the Recommender API that they are starting to apply the + recommendation themselves. This stops the recommendation content from being + updated. + + MarkRecommendationClaimed can be applied to recommendations in CLAIMED, + SUCCEEDED, FAILED, or ACTIVE state. + + Requires the recommender.*.update IAM permission for the specified + recommender. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def MarkRecommendationSucceeded(self, request, context): + """Mark the Recommendation State as Succeeded. Users can use this method to + indicate to the Recommender API that they have applied the recommendation + themselves, and the operation was successful. This stops the recommendation + content from being updated. + + MarkRecommendationSucceeded can be applied to recommendations in ACTIVE, + CLAIMED, SUCCEEDED, or FAILED state. + + Requires the recommender.*.update IAM permission for the specified + recommender. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def MarkRecommendationFailed(self, request, context): + """Mark the Recommendation State as Failed. Users can use this method to + indicate to the Recommender API that they have applied the recommendation + themselves, and the operation failed. This stops the recommendation content + from being updated. + + MarkRecommendationFailed can be applied to recommendations in ACTIVE, + CLAIMED, SUCCEEDED, or FAILED state. + + Requires the recommender.*.update IAM permission for the specified + recommender. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + +def add_RecommenderServicer_to_server(servicer, server): + rpc_method_handlers = { + "ListRecommendations": grpc.unary_unary_rpc_method_handler( + servicer.ListRecommendations, + request_deserializer=google_dot_cloud_dot_recommender__v1beta1_dot_proto_dot_recommender__service__pb2.ListRecommendationsRequest.FromString, + response_serializer=google_dot_cloud_dot_recommender__v1beta1_dot_proto_dot_recommender__service__pb2.ListRecommendationsResponse.SerializeToString, + ), + "GetRecommendation": grpc.unary_unary_rpc_method_handler( + servicer.GetRecommendation, + request_deserializer=google_dot_cloud_dot_recommender__v1beta1_dot_proto_dot_recommender__service__pb2.GetRecommendationRequest.FromString, + response_serializer=google_dot_cloud_dot_recommender__v1beta1_dot_proto_dot_recommendation__pb2.Recommendation.SerializeToString, + ), + "MarkRecommendationClaimed": grpc.unary_unary_rpc_method_handler( + servicer.MarkRecommendationClaimed, + request_deserializer=google_dot_cloud_dot_recommender__v1beta1_dot_proto_dot_recommender__service__pb2.MarkRecommendationClaimedRequest.FromString, + response_serializer=google_dot_cloud_dot_recommender__v1beta1_dot_proto_dot_recommendation__pb2.Recommendation.SerializeToString, + ), + "MarkRecommendationSucceeded": grpc.unary_unary_rpc_method_handler( + servicer.MarkRecommendationSucceeded, + request_deserializer=google_dot_cloud_dot_recommender__v1beta1_dot_proto_dot_recommender__service__pb2.MarkRecommendationSucceededRequest.FromString, + response_serializer=google_dot_cloud_dot_recommender__v1beta1_dot_proto_dot_recommendation__pb2.Recommendation.SerializeToString, + ), + "MarkRecommendationFailed": grpc.unary_unary_rpc_method_handler( + servicer.MarkRecommendationFailed, + 
request_deserializer=google_dot_cloud_dot_recommender__v1beta1_dot_proto_dot_recommender__service__pb2.MarkRecommendationFailedRequest.FromString, + response_serializer=google_dot_cloud_dot_recommender__v1beta1_dot_proto_dot_recommendation__pb2.Recommendation.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + "google.cloud.recommender.v1beta1.Recommender", rpc_method_handlers + ) + server.add_generic_rpc_handlers((generic_handler,)) diff --git a/recommender/google/cloud/recommender_v1beta1/types.py b/recommender/google/cloud/recommender_v1beta1/types.py new file mode 100644 index 000000000000..205bae43ea9c --- /dev/null +++ b/recommender/google/cloud/recommender_v1beta1/types.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +from __future__ import absolute_import +import sys + +from google.api_core.protobuf_helpers import get_messages + +from google.cloud.recommender_v1beta1.proto import recommendation_pb2 +from google.cloud.recommender_v1beta1.proto import recommender_service_pb2 +from google.protobuf import duration_pb2 +from google.protobuf import struct_pb2 +from google.protobuf import timestamp_pb2 +from google.type import money_pb2 + + +_shared_modules = [duration_pb2, struct_pb2, timestamp_pb2, money_pb2] + +_local_modules = [recommendation_pb2, recommender_service_pb2] + +names = [] + +for module in _shared_modules: # pragma: NO COVER + for name, message in get_messages(module).items(): + setattr(sys.modules[__name__], name, message) + names.append(name) +for module in _local_modules: + for name, message in get_messages(module).items(): + message.__module__ = "google.cloud.recommender_v1beta1.types" + setattr(sys.modules[__name__], name, message) + names.append(name) + + +__all__ = tuple(sorted(names)) diff --git a/recommender/mypy.ini b/recommender/mypy.ini new file mode 100644 index 000000000000..f23e6b533aad --- /dev/null +++ b/recommender/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.5 +namespace_packages = True diff --git a/recommender/noxfile.py b/recommender/noxfile.py new file mode 100644 index 000000000000..1f6797a2207f --- /dev/null +++ b/recommender/noxfile.py @@ -0,0 +1,160 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! + +from __future__ import absolute_import +import os +import shutil + +import nox + + +LOCAL_DEPS = (os.path.join("..", "api_core"), os.path.join("..", "core")) +BLACK_VERSION = "black==19.3b0" +BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + +if os.path.exists("samples"): + BLACK_PATHS.append("samples") + + +@nox.session(python="3.7") +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION, *LOCAL_DEPS) + session.run("black", "--check", *BLACK_PATHS) + session.run("flake8", "google", "tests") + + +@nox.session(python="3.6") +def blacken(session): + """Run black. + + Format code to uniform standard. + + This currently uses Python 3.6 due to the automated Kokoro run of synthtool. + That run uses an image that doesn't have 3.6 installed. Before updating this + check the state of the `gcp_ubuntu_config` we use for that Kokoro run. + """ + session.install(BLACK_VERSION) + session.run("black", *BLACK_PATHS) + + +@nox.session(python="3.7") +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def default(session): + # Install all test dependencies, then install this package in-place. + session.install("mock", "pytest", "pytest-cov") + for local_dep in LOCAL_DEPS: + session.install("-e", local_dep) + session.install("-e", ".") + + # Run py.test against the unit tests. 
+ session.run( + "py.test", + "--quiet", + "--cov=google.cloud", + "--cov=tests.unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + ) + + +@nox.session(python=["3.5", "3.6", "3.7"]) +def unit(session): + """Run the unit test suite.""" + default(session) + + +@nox.session(python=["3.7"]) +def system(session): + """Run the system test suite.""" + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + # Sanity check: Only run tests if the environment variable is set. + if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""): + session.skip("Credentials must be set via environment variable") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + # Use pre-release gRPC for system tests. + session.install("--pre", "grpcio") + + # Install all test dependencies, then install this package into the + # virtualenv's dist-packages. + session.install("mock", "pytest") + for local_dep in LOCAL_DEPS: + session.install("-e", local_dep) + session.install("-e", "../test_utils/") + session.install("-e", ".") + + # Run py.test against the system tests. + if system_test_exists: + session.run("py.test", "--quiet", system_test_path, *session.posargs) + if system_test_folder_exists: + session.run("py.test", "--quiet", system_test_folder_path, *session.posargs) + + +@nox.session(python="3.7") +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. 
+ """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python="3.7") +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install("sphinx", "alabaster", "recommonmark") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) diff --git a/recommender/setup.cfg b/recommender/setup.cfg new file mode 100644 index 000000000000..3bd555500e37 --- /dev/null +++ b/recommender/setup.cfg @@ -0,0 +1,3 @@ +# Generated by synthtool. DO NOT EDIT! +[bdist_wheel] +universal = 1 diff --git a/recommender/setup.py b/recommender/setup.py new file mode 100644 index 000000000000..31a46fd840eb --- /dev/null +++ b/recommender/setup.py @@ -0,0 +1,70 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import io +import os + +import setuptools + +name = "google-cloud-recommender" +description = "Cloud Recommender API client library" +version = "0.1.0" +release_status = "Development Status :: 3 - Alpha" +dependencies = ["google-api-core[grpc] >= 1.14.0, < 2.0.0dev"] + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package for package in setuptools.find_packages() if package.startswith("google") +] + +namespaces = ["google"] +if "google.cloud" in packages: + namespaces.append("google.cloud") + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url="https://github.com/GoogleCloudPlatform/google-cloud-python", + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.5", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + namespace_packages=namespaces, + install_requires=dependencies, + python_requires=">=3.5", + include_package_data=True, + zip_safe=False, +) diff --git a/recommender/synth.metadata b/recommender/synth.metadata new file mode 100644 index 000000000000..41fd2c0857c7 --- /dev/null +++ b/recommender/synth.metadata @@ -0,0 +1,39 @@ +{ + "updateTime": "2019-10-05T12:31:38.542410Z", + "sources": [ + { + "generator": { + "name": "artman", + "version": "0.38.0", + "dockerImage": "googleapis/artman@sha256:0d2f8d429110aeb8d82df6550ef4ede59d40df9062d260a1580fce688b0512bf" + } + }, + { 
+ "git": { + "name": "googleapis", + "remote": "https://github.com/googleapis/googleapis.git", + "sha": "ceb8e2fb12f048cc94caae532ef0b4cf026a78f3", + "internalRef": "272971705" + } + }, + { + "template": { + "name": "python_library", + "origin": "synthtool.gcp", + "version": "2019.5.2" + } + } + ], + "destinations": [ + { + "client": { + "source": "googleapis", + "apiName": "recommender", + "apiVersion": "v1beta1", + "language": "python", + "generator": "gapic", + "config": "google/cloud/recommender/artman_recommender_v1beta1.yaml" + } + } + ] +} \ No newline at end of file diff --git a/recommender/synth.py b/recommender/synth.py new file mode 100644 index 000000000000..93d445f42f11 --- /dev/null +++ b/recommender/synth.py @@ -0,0 +1,42 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""This script is used to synthesize generated parts of this library.""" +import re + +import synthtool as s +from synthtool import gcp + +gapic = gcp.GAPICGenerator() +versions = ["v1beta1"] +common = gcp.CommonTemplates() + + +# ---------------------------------------------------------------------------- +# Generate Cloud Recommender +# ---------------------------------------------------------------------------- +for version in versions: + library = gapic.py_library( + "recommender", version, + include_protos=True + ) + s.move(library, excludes=['nox.py', 'docs/index.rst', 'README.rst', 'setup.py']) + +# ---------------------------------------------------------------------------- +# Add templated files +# ---------------------------------------------------------------------------- +templated_files = common.py_library(unit_cov_level=97, cov_level=100) +s.move(templated_files, excludes=['noxfile.py']) + +s.shell.run(["nox", "-s", "blacken"], hide_output=False) \ No newline at end of file diff --git a/recommender/tests/unit/gapic/v1beta1/test_recommender_client_v1beta1.py b/recommender/tests/unit/gapic/v1beta1/test_recommender_client_v1beta1.py new file mode 100644 index 000000000000..dafe315484bf --- /dev/null +++ b/recommender/tests/unit/gapic/v1beta1/test_recommender_client_v1beta1.py @@ -0,0 +1,324 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Unit tests.""" + +import mock +import pytest + +from google.cloud import recommender_v1beta1 +from google.cloud.recommender_v1beta1.proto import recommendation_pb2 +from google.cloud.recommender_v1beta1.proto import recommender_service_pb2 + + +class MultiCallableStub(object): + """Stub for the grpc.UnaryUnaryMultiCallable interface.""" + + def __init__(self, method, channel_stub): + self.method = method + self.channel_stub = channel_stub + + def __call__(self, request, timeout=None, metadata=None, credentials=None): + self.channel_stub.requests.append((self.method, request)) + + response = None + if self.channel_stub.responses: + response = self.channel_stub.responses.pop() + + if isinstance(response, Exception): + raise response + + if response: + return response + + +class ChannelStub(object): + """Stub for the grpc.Channel interface.""" + + def __init__(self, responses=[]): + self.responses = responses + self.requests = [] + + def unary_unary(self, method, request_serializer=None, response_deserializer=None): + return MultiCallableStub(method, self) + + +class CustomException(Exception): + pass + + +class TestRecommenderClient(object): + def test_list_recommendations(self): + # Setup Expected Response + next_page_token = "" + recommendations_element = {} + recommendations = [recommendations_element] + expected_response = { + "next_page_token": next_page_token, + "recommendations": recommendations, + } + expected_response = recommender_service_pb2.ListRecommendationsResponse( + **expected_response + ) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = recommender_v1beta1.RecommenderClient() + + # Setup Request + parent = client.recommender_path("[PROJECT]", "[LOCATION]", "[RECOMMENDER]") + + paged_list_response = client.list_recommendations(parent) + resources = 
list(paged_list_response) + assert len(resources) == 1 + + assert expected_response.recommendations[0] == resources[0] + + assert len(channel.requests) == 1 + expected_request = recommender_service_pb2.ListRecommendationsRequest( + parent=parent + ) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_list_recommendations_exception(self): + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = recommender_v1beta1.RecommenderClient() + + # Setup request + parent = client.recommender_path("[PROJECT]", "[LOCATION]", "[RECOMMENDER]") + + paged_list_response = client.list_recommendations(parent) + with pytest.raises(CustomException): + list(paged_list_response) + + def test_get_recommendation(self): + # Setup Expected Response + name_2 = "name2-1052831874" + description = "description-1724546052" + recommender_subtype = "recommenderSubtype-1488504412" + etag = "etag3123477" + expected_response = { + "name": name_2, + "description": description, + "recommender_subtype": recommender_subtype, + "etag": etag, + } + expected_response = recommendation_pb2.Recommendation(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = recommender_v1beta1.RecommenderClient() + + # Setup Request + name = client.recommendation_path( + "[PROJECT]", "[LOCATION]", "[RECOMMENDER]", "[RECOMMENDATION]" + ) + + response = client.get_recommendation(name) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = recommender_service_pb2.GetRecommendationRequest(name=name) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def 
test_get_recommendation_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = recommender_v1beta1.RecommenderClient() + + # Setup request + name = client.recommendation_path( + "[PROJECT]", "[LOCATION]", "[RECOMMENDER]", "[RECOMMENDATION]" + ) + + with pytest.raises(CustomException): + client.get_recommendation(name) + + def test_mark_recommendation_claimed(self): + # Setup Expected Response + name_2 = "name2-1052831874" + description = "description-1724546052" + recommender_subtype = "recommenderSubtype-1488504412" + etag_2 = "etag2-1293302904" + expected_response = { + "name": name_2, + "description": description, + "recommender_subtype": recommender_subtype, + "etag": etag_2, + } + expected_response = recommendation_pb2.Recommendation(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = recommender_v1beta1.RecommenderClient() + + # Setup Request + name = client.recommendation_path( + "[PROJECT]", "[LOCATION]", "[RECOMMENDER]", "[RECOMMENDATION]" + ) + etag = "etag3123477" + + response = client.mark_recommendation_claimed(name, etag) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = recommender_service_pb2.MarkRecommendationClaimedRequest( + name=name, etag=etag + ) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_mark_recommendation_claimed_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + 
client = recommender_v1beta1.RecommenderClient() + + # Setup request + name = client.recommendation_path( + "[PROJECT]", "[LOCATION]", "[RECOMMENDER]", "[RECOMMENDATION]" + ) + etag = "etag3123477" + + with pytest.raises(CustomException): + client.mark_recommendation_claimed(name, etag) + + def test_mark_recommendation_succeeded(self): + # Setup Expected Response + name_2 = "name2-1052831874" + description = "description-1724546052" + recommender_subtype = "recommenderSubtype-1488504412" + etag_2 = "etag2-1293302904" + expected_response = { + "name": name_2, + "description": description, + "recommender_subtype": recommender_subtype, + "etag": etag_2, + } + expected_response = recommendation_pb2.Recommendation(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = recommender_v1beta1.RecommenderClient() + + # Setup Request + name = client.recommendation_path( + "[PROJECT]", "[LOCATION]", "[RECOMMENDER]", "[RECOMMENDATION]" + ) + etag = "etag3123477" + + response = client.mark_recommendation_succeeded(name, etag) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = recommender_service_pb2.MarkRecommendationSucceededRequest( + name=name, etag=etag + ) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_mark_recommendation_succeeded_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = recommender_v1beta1.RecommenderClient() + + # Setup request + name = client.recommendation_path( + "[PROJECT]", "[LOCATION]", "[RECOMMENDER]", "[RECOMMENDATION]" + ) + etag = "etag3123477" + + with 
pytest.raises(CustomException): + client.mark_recommendation_succeeded(name, etag) + + def test_mark_recommendation_failed(self): + # Setup Expected Response + name_2 = "name2-1052831874" + description = "description-1724546052" + recommender_subtype = "recommenderSubtype-1488504412" + etag_2 = "etag2-1293302904" + expected_response = { + "name": name_2, + "description": description, + "recommender_subtype": recommender_subtype, + "etag": etag_2, + } + expected_response = recommendation_pb2.Recommendation(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = recommender_v1beta1.RecommenderClient() + + # Setup Request + name = client.recommendation_path( + "[PROJECT]", "[LOCATION]", "[RECOMMENDER]", "[RECOMMENDATION]" + ) + etag = "etag3123477" + + response = client.mark_recommendation_failed(name, etag) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = recommender_service_pb2.MarkRecommendationFailedRequest( + name=name, etag=etag + ) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_mark_recommendation_failed_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = recommender_v1beta1.RecommenderClient() + + # Setup request + name = client.recommendation_path( + "[PROJECT]", "[LOCATION]", "[RECOMMENDER]", "[RECOMMENDATION]" + ) + etag = "etag3123477" + + with pytest.raises(CustomException): + client.mark_recommendation_failed(name, etag) diff --git a/redis/docs/conf.py b/redis/docs/conf.py index 55f004284af5..4fad6c23f7cc 100644 --- a/redis/docs/conf.py +++ b/redis/docs/conf.py @@ 
-341,7 +341,7 @@ "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://2.python-requests.org/en/master/", None), + "requests": ("https://requests.kennethreitz.org/en/stable/", None), "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } diff --git a/redis/google/cloud/redis_v1/gapic/cloud_redis_client.py b/redis/google/cloud/redis_v1/gapic/cloud_redis_client.py index 2cec791768ac..5060fd7c5f77 100644 --- a/redis/google/cloud/redis_v1/gapic/cloud_redis_client.py +++ b/redis/google/cloud/redis_v1/gapic/cloud_redis_client.py @@ -557,7 +557,7 @@ def update_instance( >>> paths_element_2 = 'memory_size_gb' >>> paths = [paths_element, paths_element_2] >>> update_mask = {'paths': paths} - >>> display_name = ' instance.memory_size_gb=4' + >>> display_name = ' instance.memory_size_gb=4' >>> instance = {'display_name': display_name} >>> >>> response = client.update_instance(update_mask, instance) @@ -667,7 +667,8 @@ def import_instance( >>> >>> client = redis_v1.CloudRedisClient() >>> - >>> name = client.instance_path('[PROJECT]', '[LOCATION]', '[INSTANCE]') + >>> # TODO: Initialize `name`: + >>> name = '' >>> >>> # TODO: Initialize `input_config`: >>> input_config = {} @@ -768,7 +769,8 @@ def export_instance( >>> >>> client = redis_v1.CloudRedisClient() >>> - >>> name = client.instance_path('[PROJECT]', '[LOCATION]', '[INSTANCE]') + >>> # TODO: Initialize `name`: + >>> name = '' >>> >>> # TODO: Initialize `output_config`: >>> output_config = {} @@ -851,7 +853,7 @@ def export_instance( def failover_instance( self, name, - data_protection_mode, + data_protection_mode=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, @@ -862,16 +864,12 @@ def failover_instance( 
Example: >>> from google.cloud import redis_v1 - >>> from google.cloud.redis_v1 import enums >>> >>> client = redis_v1.CloudRedisClient() >>> >>> name = client.instance_path('[PROJECT]', '[LOCATION]', '[INSTANCE]') >>> - >>> # TODO: Initialize `data_protection_mode`: - >>> data_protection_mode = enums.FailoverInstanceRequest.DataProtectionMode.DATA_PROTECTION_MODE_UNSPECIFIED - >>> - >>> response = client.failover_instance(name, data_protection_mode) + >>> response = client.failover_instance(name) >>> >>> def callback(operation_future): ... # Handle result. diff --git a/redis/google/cloud/redis_v1/proto/cloud_redis.proto b/redis/google/cloud/redis_v1/proto/cloud_redis.proto index 65366ca432ab..1c9cd495da6e 100644 --- a/redis/google/cloud/redis_v1/proto/cloud_redis.proto +++ b/redis/google/cloud/redis_v1/proto/cloud_redis.proto @@ -18,10 +18,12 @@ syntax = "proto3"; package google.cloud.redis.v1; import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; import "google/longrunning/operations.proto"; import "google/protobuf/field_mask.proto"; import "google/protobuf/timestamp.proto"; -import "google/api/client.proto"; option go_package = "google.golang.org/genproto/googleapis/cloud/redis/v1;redis"; option java_multiple_files = true; @@ -45,6 +47,7 @@ option java_package = "com.google.cloud.redis.v1"; // * `projects/redpepper-1290/locations/us-central1/instances/my-redis` service CloudRedis { option (google.api.default_host) = "redis.googleapis.com"; + option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; // Lists all Redis instances owned by a project in either the specified // location (region) or all locations. @@ -58,6 +61,7 @@ service CloudRedis { option (google.api.http) = { get: "/v1/{parent=projects/*/locations/*}/instances" }; + option (google.api.method_signature) = "parent"; } // Gets the details of a specific Redis instance. 
@@ -65,6 +69,7 @@ service CloudRedis { option (google.api.http) = { get: "/v1/{name=projects/*/locations/*/instances/*}" }; + option (google.api.method_signature) = "name"; } // Creates a Redis instance based on the specified tier and memory size. @@ -84,6 +89,11 @@ service CloudRedis { post: "/v1/{parent=projects/*/locations/*}/instances" body: "instance" }; + option (google.api.method_signature) = "parent,instance_id,instance"; + option (google.longrunning.operation_info) = { + response_type: "google.cloud.redis.v1.Instance" + metadata_type: "google.cloud.redis.v1.OperationMetadata" + }; } // Updates the metadata and configuration of a specific Redis instance. @@ -96,6 +106,11 @@ service CloudRedis { patch: "/v1/{instance.name=projects/*/locations/*/instances/*}" body: "instance" }; + option (google.api.method_signature) = "update_mask,instance"; + option (google.longrunning.operation_info) = { + response_type: "google.cloud.redis.v1.Instance" + metadata_type: "google.cloud.redis.v1.OperationMetadata" + }; } // Import a Redis RDB snapshot file from Cloud Storage into a Redis instance. @@ -111,6 +126,11 @@ service CloudRedis { post: "/v1/{name=projects/*/locations/*/instances/*}:import" body: "*" }; + option (google.api.method_signature) = "name,input_config"; + option (google.longrunning.operation_info) = { + response_type: "google.cloud.redis.v1.Instance" + metadata_type: "google.cloud.redis.v1.OperationMetadata" + }; } // Export Redis instance data into a Redis RDB format file in Cloud Storage. 
@@ -124,6 +144,11 @@ service CloudRedis { post: "/v1/{name=projects/*/locations/*/instances/*}:export" body: "*" }; + option (google.api.method_signature) = "name,output_config"; + option (google.longrunning.operation_info) = { + response_type: "google.cloud.redis.v1.Instance" + metadata_type: "google.cloud.redis.v1.OperationMetadata" + }; } // Initiates a failover of the master node to current replica node for a @@ -133,6 +158,11 @@ service CloudRedis { post: "/v1/{name=projects/*/locations/*/instances/*}:failover" body: "*" }; + option (google.api.method_signature) = "name,data_protection_mode"; + option (google.longrunning.operation_info) = { + response_type: "google.cloud.redis.v1.Instance" + metadata_type: "google.cloud.redis.v1.OperationMetadata" + }; } // Deletes a specific Redis instance. Instance stops serving and data is @@ -141,11 +171,21 @@ service CloudRedis { option (google.api.http) = { delete: "/v1/{name=projects/*/locations/*/instances/*}" }; + option (google.api.method_signature) = "name"; + option (google.longrunning.operation_info) = { + response_type: "google.protobuf.Empty" + metadata_type: "google.cloud.redis.v1.OperationMetadata" + }; } } // A Google Cloud Redis instance. message Instance { + option (google.api.resource) = { + type: "redis.googleapis.com/Instance" + pattern: "projects/{project}/locations/{location}/instances/{instance}" + }; + // Represents the different states of a Redis instance. enum State { // Not set. @@ -199,7 +239,7 @@ message Instance { // specific zone (or collection of zones for cross-zone instances) an instance // should be provisioned in. Refer to [location_id] and // [alternative_location_id] fields for more details. - string name = 1; + string name = 1 [(google.api.field_behavior) = REQUIRED]; // An arbitrary and optional user-provided name for the instance. string display_name = 2; @@ -212,12 +252,12 @@ message Instance { // instances will be created across two zones for protection against zonal // failures. 
If [alternative_location_id] is also provided, it must be // different from [location_id]. - string location_id = 4; + string location_id = 4 [(google.api.field_behavior) = OPTIONAL]; // Optional. Only applicable to STANDARD_HA tier which protects the instance // against zonal failures by provisioning it across two zones. If provided, it // must be a different zone from the one provided in [location_id]. - string alternative_location_id = 5; + string alternative_location_id = 5 [(google.api.field_behavior) = OPTIONAL]; // Optional. The version of Redis software. // If not provided, latest supported version will be used. Updating the @@ -226,37 +266,37 @@ message Instance { // // * `REDIS_4_0` for Redis 4.0 compatibility (default) // * `REDIS_3_2` for Redis 3.2 compatibility - string redis_version = 7; + string redis_version = 7 [(google.api.field_behavior) = OPTIONAL]; // Optional. The CIDR range of internal addresses that are reserved for this // instance. If not provided, the service will choose an unused /29 block, // for example, 10.0.0.0/29 or 192.168.0.0/29. Ranges must be unique // and non-overlapping with existing subnets in an authorized network. - string reserved_ip_range = 9; + string reserved_ip_range = 9 [(google.api.field_behavior) = OPTIONAL]; // Output only. Hostname or IP address of the exposed Redis endpoint used by // clients to connect to the service. - string host = 10; + string host = 10 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The port number of the exposed Redis endpoint. - int32 port = 11; + int32 port = 11 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The current zone where the Redis endpoint is placed. For Basic // Tier instances, this will always be the same as the [location_id] // provided by the user at creation time. For Standard Tier instances, // this can be either [location_id] or [alternative_location_id] and can // change after a failover event. 
- string current_location_id = 12; + string current_location_id = 12 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The time the instance was created. - google.protobuf.Timestamp create_time = 13; + google.protobuf.Timestamp create_time = 13 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The current state of this instance. - State state = 14; + State state = 14 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Additional information about the current status of this // instance, if available. - string status_message = 15; + string status_message = 15 [(google.api.field_behavior) = OUTPUT_ONLY]; // Optional. Redis configuration parameters, according to // http://redis.io/topics/config. Currently, the only supported parameters @@ -272,26 +312,26 @@ message Instance { // * activedefrag // * lfu-log-factor // * lfu-decay-time - map redis_configs = 16; + map redis_configs = 16 [(google.api.field_behavior) = OPTIONAL]; // Required. The service tier of the instance. - Tier tier = 17; + Tier tier = 17 [(google.api.field_behavior) = REQUIRED]; // Required. Redis memory size in GiB. - int32 memory_size_gb = 18; + int32 memory_size_gb = 18 [(google.api.field_behavior) = REQUIRED]; // Optional. The full name of the Google Compute Engine // [network](/compute/docs/networks-and-firewalls#networks) to which the // instance is connected. If left unspecified, the `default` network // will be used. - string authorized_network = 20; + string authorized_network = 20 [(google.api.field_behavior) = OPTIONAL]; // Output only. Cloud IAM identity used by import / export operations to // transfer data to/from Cloud Storage. Format is // "serviceAccount:". The value may change over time // for a given instance so should be checked before each import/export // operation. 
- string persistence_iam_identity = 21; + string persistence_iam_identity = 21 [(google.api.field_behavior) = OUTPUT_ONLY]; } // Request for [ListInstances][google.cloud.redis.v1.CloudRedis.ListInstances]. @@ -299,7 +339,12 @@ message ListInstancesRequest { // Required. The resource name of the instance location using the form: // `projects/{project_id}/locations/{location_id}` // where `location_id` refers to a GCP region. - string parent = 1; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; // The maximum number of items to return. // @@ -342,7 +387,12 @@ message GetInstanceRequest { // Required. Redis instance resource name using the form: // `projects/{project_id}/locations/{location_id}/instances/{instance_id}` // where `location_id` refers to a GCP region. - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "redis.googleapis.com/Instance" + } + ]; } // Request for [CreateInstance][google.cloud.redis.v1.CloudRedis.CreateInstance]. @@ -350,7 +400,12 @@ message CreateInstanceRequest { // Required. The resource name of the instance location using the form: // `projects/{project_id}/locations/{location_id}` // where `location_id` refers to a GCP region. - string parent = 1; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; // Required. The logical name of the Redis instance in the customer project // with the following restrictions: @@ -360,10 +415,10 @@ message CreateInstanceRequest { // * Must be between 1-40 characters. // * Must end with a number or a letter. // * Must be unique within the customer project / location - string instance_id = 2; + string instance_id = 2 [(google.api.field_behavior) = REQUIRED]; // Required. 
A Redis [Instance] resource - Instance instance = 3; + Instance instance = 3 [(google.api.field_behavior) = REQUIRED]; } // Request for [UpdateInstance][google.cloud.redis.v1.CloudRedis.UpdateInstance]. @@ -376,11 +431,11 @@ message UpdateInstanceRequest { // * `labels` // * `memorySizeGb` // * `redisConfig` - google.protobuf.FieldMask update_mask = 1; + google.protobuf.FieldMask update_mask = 1 [(google.api.field_behavior) = REQUIRED]; // Required. Update description. // Only fields specified in update_mask are updated. - Instance instance = 2; + Instance instance = 2 [(google.api.field_behavior) = REQUIRED]; } // Request for [DeleteInstance][google.cloud.redis.v1.CloudRedis.DeleteInstance]. @@ -388,13 +443,18 @@ message DeleteInstanceRequest { // Required. Redis instance resource name using the form: // `projects/{project_id}/locations/{location_id}/instances/{instance_id}` // where `location_id` refers to a GCP region. - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "redis.googleapis.com/Instance" + } + ]; } // The Cloud Storage location for the input content message GcsSource { // Required. Source data URI. (e.g. 'gs://my_bucket/my_object'). - string uri = 1; + string uri = 1 [(google.api.field_behavior) = REQUIRED]; } // The input content @@ -411,17 +471,17 @@ message ImportInstanceRequest { // Required. Redis instance resource name using the form: // `projects/{project_id}/locations/{location_id}/instances/{instance_id}` // where `location_id` refers to a GCP region. - string name = 1; + string name = 1 [(google.api.field_behavior) = REQUIRED]; // Required. Specify data to be imported. - InputConfig input_config = 3; + InputConfig input_config = 3 [(google.api.field_behavior) = REQUIRED]; } // The Cloud Storage location for the output content message GcsDestination { // Required. Data destination URI (e.g. // 'gs://my_bucket/my_object'). Existing files will be overwritten. 
- string uri = 1; + string uri = 1 [(google.api.field_behavior) = REQUIRED]; } // The output content @@ -438,10 +498,10 @@ message ExportInstanceRequest { // Required. Redis instance resource name using the form: // `projects/{project_id}/locations/{location_id}/instances/{instance_id}` // where `location_id` refers to a GCP region. - string name = 1; + string name = 1 [(google.api.field_behavior) = REQUIRED]; // Required. Specify data to be exported. - OutputConfig output_config = 3; + OutputConfig output_config = 3 [(google.api.field_behavior) = REQUIRED]; } // Request for [Failover][google.cloud.redis.v1.CloudRedis.FailoverInstance]. @@ -464,11 +524,16 @@ message FailoverInstanceRequest { // Required. Redis instance resource name using the form: // `projects/{project_id}/locations/{location_id}/instances/{instance_id}` // where `location_id` refers to a GCP region. - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "redis.googleapis.com/Instance" + } + ]; // Optional. Available data protection modes that the user can choose. If it's // unspecified, data protection mode will be LIMITED_DATA_LOSS by default. - DataProtectionMode data_protection_mode = 2; + DataProtectionMode data_protection_mode = 2 [(google.api.field_behavior) = OPTIONAL]; } // Represents the v1 metadata of the long-running operation. @@ -504,7 +569,7 @@ message LocationMetadata { // by the lowercase ID of each zone, as defined by GCE. These keys can be // specified in `location_id` or `alternative_location_id` fields when // creating a Redis instance. - map available_zones = 1; + map available_zones = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; } // Defines specific information for a particular zone. 
Currently empty and diff --git a/redis/google/cloud/redis_v1/proto/cloud_redis_pb2.py b/redis/google/cloud/redis_v1/proto/cloud_redis_pb2.py index 92983de8facf..14a934a75fae 100644 --- a/redis/google/cloud/redis_v1/proto/cloud_redis_pb2.py +++ b/redis/google/cloud/redis_v1/proto/cloud_redis_pb2.py @@ -16,12 +16,14 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.longrunning import ( operations_pb2 as google_dot_longrunning_dot_operations__pb2, ) from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from google.api import client_pb2 as google_dot_api_dot_client__pb2 DESCRIPTOR = _descriptor.FileDescriptor( @@ -32,14 +34,16 @@ "\n\031com.google.cloud.redis.v1B\030CloudRedisServiceV1ProtoP\001Z:google.golang.org/genproto/googleapis/cloud/redis/v1;redis" ), serialized_pb=_b( - '\n-google/cloud/redis_v1/proto/cloud_redis.proto\x12\x15google.cloud.redis.v1\x1a\x1cgoogle/api/annotations.proto\x1a#google/longrunning/operations.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/api/client.proto"\x94\x07\n\x08Instance\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12;\n\x06labels\x18\x03 \x03(\x0b\x32+.google.cloud.redis.v1.Instance.LabelsEntry\x12\x13\n\x0blocation_id\x18\x04 \x01(\t\x12\x1f\n\x17\x61lternative_location_id\x18\x05 \x01(\t\x12\x15\n\rredis_version\x18\x07 \x01(\t\x12\x19\n\x11reserved_ip_range\x18\t \x01(\t\x12\x0c\n\x04host\x18\n \x01(\t\x12\x0c\n\x04port\x18\x0b \x01(\x05\x12\x1b\n\x13\x63urrent_location_id\x18\x0c \x01(\t\x12/\n\x0b\x63reate_time\x18\r 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x34\n\x05state\x18\x0e \x01(\x0e\x32%.google.cloud.redis.v1.Instance.State\x12\x16\n\x0estatus_message\x18\x0f \x01(\t\x12H\n\rredis_configs\x18\x10 \x03(\x0b\x32\x31.google.cloud.redis.v1.Instance.RedisConfigsEntry\x12\x32\n\x04tier\x18\x11 \x01(\x0e\x32$.google.cloud.redis.v1.Instance.Tier\x12\x16\n\x0ememory_size_gb\x18\x12 \x01(\x05\x12\x1a\n\x12\x61uthorized_network\x18\x14 \x01(\t\x12 \n\x18persistence_iam_identity\x18\x15 \x01(\t\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x33\n\x11RedisConfigsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x94\x01\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0c\n\x08\x43REATING\x10\x01\x12\t\n\x05READY\x10\x02\x12\x0c\n\x08UPDATING\x10\x03\x12\x0c\n\x08\x44\x45LETING\x10\x04\x12\r\n\tREPAIRING\x10\x05\x12\x0f\n\x0bMAINTENANCE\x10\x06\x12\r\n\tIMPORTING\x10\x08\x12\x10\n\x0c\x46\x41ILING_OVER\x10\t"8\n\x04Tier\x12\x14\n\x10TIER_UNSPECIFIED\x10\x00\x12\t\n\x05\x42\x41SIC\x10\x01\x12\x0f\n\x0bSTANDARD_HA\x10\x03"M\n\x14ListInstancesRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"y\n\x15ListInstancesResponse\x12\x32\n\tinstances\x18\x01 \x03(\x0b\x32\x1f.google.cloud.redis.v1.Instance\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\x12\x13\n\x0bunreachable\x18\x03 \x03(\t""\n\x12GetInstanceRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"o\n\x15\x43reateInstanceRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x13\n\x0binstance_id\x18\x02 \x01(\t\x12\x31\n\x08instance\x18\x03 \x01(\x0b\x32\x1f.google.cloud.redis.v1.Instance"{\n\x15UpdateInstanceRequest\x12/\n\x0bupdate_mask\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\x12\x31\n\x08instance\x18\x02 \x01(\x0b\x32\x1f.google.cloud.redis.v1.Instance"%\n\x15\x44\x65leteInstanceRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"\x18\n\tGcsSource\x12\x0b\n\x03uri\x18\x01 
\x01(\t"O\n\x0bInputConfig\x12\x36\n\ngcs_source\x18\x01 \x01(\x0b\x32 .google.cloud.redis.v1.GcsSourceH\x00\x42\x08\n\x06source"_\n\x15ImportInstanceRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x38\n\x0cinput_config\x18\x03 \x01(\x0b\x32".google.cloud.redis.v1.InputConfig"\x1d\n\x0eGcsDestination\x12\x0b\n\x03uri\x18\x01 \x01(\t"_\n\x0cOutputConfig\x12@\n\x0fgcs_destination\x18\x01 \x01(\x0b\x32%.google.cloud.redis.v1.GcsDestinationH\x00\x42\r\n\x0b\x64\x65stination"a\n\x15\x45xportInstanceRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12:\n\routput_config\x18\x03 \x01(\x0b\x32#.google.cloud.redis.v1.OutputConfig"\xf0\x01\n\x17\x46\x61iloverInstanceRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12_\n\x14\x64\x61ta_protection_mode\x18\x02 \x01(\x0e\x32\x41.google.cloud.redis.v1.FailoverInstanceRequest.DataProtectionMode"f\n\x12\x44\x61taProtectionMode\x12$\n DATA_PROTECTION_MODE_UNSPECIFIED\x10\x00\x12\x15\n\x11LIMITED_DATA_LOSS\x10\x01\x12\x13\n\x0f\x46ORCE_DATA_LOSS\x10\x02"\xd6\x01\n\x11OperationMetadata\x12/\n\x0b\x63reate_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0e\n\x06target\x18\x03 \x01(\t\x12\x0c\n\x04verb\x18\x04 \x01(\t\x12\x15\n\rstatus_detail\x18\x05 \x01(\t\x12\x18\n\x10\x63\x61ncel_requested\x18\x06 \x01(\x08\x12\x13\n\x0b\x61pi_version\x18\x07 \x01(\t"\xc4\x01\n\x10LocationMetadata\x12T\n\x0f\x61vailable_zones\x18\x01 \x03(\x0b\x32;.google.cloud.redis.v1.LocationMetadata.AvailableZonesEntry\x1aZ\n\x13\x41vailableZonesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x32\n\x05value\x18\x02 
\x01(\x0b\x32#.google.cloud.redis.v1.ZoneMetadata:\x02\x38\x01"\x0e\n\x0cZoneMetadata2\xa7\n\n\nCloudRedis\x12\xa1\x01\n\rListInstances\x12+.google.cloud.redis.v1.ListInstancesRequest\x1a,.google.cloud.redis.v1.ListInstancesResponse"5\x82\xd3\xe4\x93\x02/\x12-/v1/{parent=projects/*/locations/*}/instances\x12\x90\x01\n\x0bGetInstance\x12).google.cloud.redis.v1.GetInstanceRequest\x1a\x1f.google.cloud.redis.v1.Instance"5\x82\xd3\xe4\x93\x02/\x12-/v1/{name=projects/*/locations/*/instances/*}\x12\x9e\x01\n\x0e\x43reateInstance\x12,.google.cloud.redis.v1.CreateInstanceRequest\x1a\x1d.google.longrunning.Operation"?\x82\xd3\xe4\x93\x02\x39"-/v1/{parent=projects/*/locations/*}/instances:\x08instance\x12\xa7\x01\n\x0eUpdateInstance\x12,.google.cloud.redis.v1.UpdateInstanceRequest\x1a\x1d.google.longrunning.Operation"H\x82\xd3\xe4\x93\x02\x42\x32\x36/v1/{instance.name=projects/*/locations/*/instances/*}:\x08instance\x12\x9e\x01\n\x0eImportInstance\x12,.google.cloud.redis.v1.ImportInstanceRequest\x1a\x1d.google.longrunning.Operation"?\x82\xd3\xe4\x93\x02\x39"4/v1/{name=projects/*/locations/*/instances/*}:import:\x01*\x12\x9e\x01\n\x0e\x45xportInstance\x12,.google.cloud.redis.v1.ExportInstanceRequest\x1a\x1d.google.longrunning.Operation"?\x82\xd3\xe4\x93\x02\x39"4/v1/{name=projects/*/locations/*/instances/*}:export:\x01*\x12\xa4\x01\n\x10\x46\x61iloverInstance\x12..google.cloud.redis.v1.FailoverInstanceRequest\x1a\x1d.google.longrunning.Operation"A\x82\xd3\xe4\x93\x02;"6/v1/{name=projects/*/locations/*/instances/*}:failover:\x01*\x12\x94\x01\n\x0e\x44\x65leteInstance\x12,.google.cloud.redis.v1.DeleteInstanceRequest\x1a\x1d.google.longrunning.Operation"5\x82\xd3\xe4\x93\x02/*-/v1/{name=projects/*/locations/*/instances/*}\x1a\x17\xca\x41\x14redis.googleapis.comBs\n\x19\x63om.google.cloud.redis.v1B\x18\x43loudRedisServiceV1ProtoP\x01Z:google.golang.org/genproto/googleapis/cloud/redis/v1;redisb\x06proto3' + 
'\n-google/cloud/redis_v1/proto/cloud_redis.proto\x12\x15google.cloud.redis.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a#google/longrunning/operations.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xc6\x08\n\x08Instance\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12;\n\x06labels\x18\x03 \x03(\x0b\x32+.google.cloud.redis.v1.Instance.LabelsEntry\x12\x18\n\x0blocation_id\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12$\n\x17\x61lternative_location_id\x18\x05 \x01(\tB\x03\xe0\x41\x01\x12\x1a\n\rredis_version\x18\x07 \x01(\tB\x03\xe0\x41\x01\x12\x1e\n\x11reserved_ip_range\x18\t \x01(\tB\x03\xe0\x41\x01\x12\x11\n\x04host\x18\n \x01(\tB\x03\xe0\x41\x03\x12\x11\n\x04port\x18\x0b \x01(\x05\x42\x03\xe0\x41\x03\x12 \n\x13\x63urrent_location_id\x18\x0c \x01(\tB\x03\xe0\x41\x03\x12\x34\n\x0b\x63reate_time\x18\r \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x39\n\x05state\x18\x0e \x01(\x0e\x32%.google.cloud.redis.v1.Instance.StateB\x03\xe0\x41\x03\x12\x1b\n\x0estatus_message\x18\x0f \x01(\tB\x03\xe0\x41\x03\x12M\n\rredis_configs\x18\x10 \x03(\x0b\x32\x31.google.cloud.redis.v1.Instance.RedisConfigsEntryB\x03\xe0\x41\x01\x12\x37\n\x04tier\x18\x11 \x01(\x0e\x32$.google.cloud.redis.v1.Instance.TierB\x03\xe0\x41\x02\x12\x1b\n\x0ememory_size_gb\x18\x12 \x01(\x05\x42\x03\xe0\x41\x02\x12\x1f\n\x12\x61uthorized_network\x18\x14 \x01(\tB\x03\xe0\x41\x01\x12%\n\x18persistence_iam_identity\x18\x15 \x01(\tB\x03\xe0\x41\x03\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x33\n\x11RedisConfigsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01"\x94\x01\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0c\n\x08\x43REATING\x10\x01\x12\t\n\x05READY\x10\x02\x12\x0c\n\x08UPDATING\x10\x03\x12\x0c\n\x08\x44\x45LETING\x10\x04\x12\r\n\tREPAIRING\x10\x05\x12\x0f\n\x0bMAINTENANCE\x10\x06\x12\r\n\tIMPORTING\x10\x08\x12\x10\n\x0c\x46\x41ILING_OVER\x10\t"8\n\x04Tier\x12\x14\n\x10TIER_UNSPECIFIED\x10\x00\x12\t\n\x05\x42\x41SIC\x10\x01\x12\x0f\n\x0bSTANDARD_HA\x10\x03:`\xea\x41]\n\x1dredis.googleapis.com/Instance\x12\x82\xd3\xe4\x93\x02/\x12-/v1/{parent=projects/*/locations/*}/instances\xda\x41\x06parent\x12\x97\x01\n\x0bGetInstance\x12).google.cloud.redis.v1.GetInstanceRequest\x1a\x1f.google.cloud.redis.v1.Instance"<\x82\xd3\xe4\x93\x02/\x12-/v1/{name=projects/*/locations/*/instances/*}\xda\x41\x04name\x12\x89\x02\n\x0e\x43reateInstance\x12,.google.cloud.redis.v1.CreateInstanceRequest\x1a\x1d.google.longrunning.Operation"\xa9\x01\x82\xd3\xe4\x93\x02\x39"-/v1/{parent=projects/*/locations/*}/instances:\x08instance\xda\x41\x1bparent,instance_id,instance\xca\x41I\n\x1egoogle.cloud.redis.v1.Instance\x12\'google.cloud.redis.v1.OperationMetadata\x12\x8b\x02\n\x0eUpdateInstance\x12,.google.cloud.redis.v1.UpdateInstanceRequest\x1a\x1d.google.longrunning.Operation"\xab\x01\x82\xd3\xe4\x93\x02\x42\x32\x36/v1/{instance.name=projects/*/locations/*/instances/*}:\x08instance\xda\x41\x14update_mask,instance\xca\x41I\n\x1egoogle.cloud.redis.v1.Instance\x12\'google.cloud.redis.v1.OperationMetadata\x12\xff\x01\n\x0eImportInstance\x12,.google.cloud.redis.v1.ImportInstanceRequest\x1a\x1d.google.longrunning.Operation"\x9f\x01\x82\xd3\xe4\x93\x02\x39"4/v1/{name=projects/*/locations/*/instances/*}:import:\x01*\xda\x41\x11name,input_config\xca\x41I\n\x1egoogle.cloud.redis.v1.Instance\x12\'google.cloud.redis.v1.OperationMetadata\x12\x80\x02\n\x0e\x45xportInstance\x12,.google.cloud.redis.v1.ExportInstanceRequest\x1a\x1d.google.longrunning.Operation"\xa0\x01\x82\xd3\xe4\x93\x02\x39"4/v1/{name=projects/*/locations/*/instan
ces/*}:export:\x01*\xda\x41\x12name,output_config\xca\x41I\n\x1egoogle.cloud.redis.v1.Instance\x12\'google.cloud.redis.v1.OperationMetadata\x12\x8d\x02\n\x10\x46\x61iloverInstance\x12..google.cloud.redis.v1.FailoverInstanceRequest\x1a\x1d.google.longrunning.Operation"\xa9\x01\x82\xd3\xe4\x93\x02;"6/v1/{name=projects/*/locations/*/instances/*}:failover:\x01*\xda\x41\x19name,data_protection_mode\xca\x41I\n\x1egoogle.cloud.redis.v1.Instance\x12\'google.cloud.redis.v1.OperationMetadata\x12\xde\x01\n\x0e\x44\x65leteInstance\x12,.google.cloud.redis.v1.DeleteInstanceRequest\x1a\x1d.google.longrunning.Operation"\x7f\x82\xd3\xe4\x93\x02/*-/v1/{name=projects/*/locations/*/instances/*}\xda\x41\x04name\xca\x41@\n\x15google.protobuf.Empty\x12\'google.cloud.redis.v1.OperationMetadata\x1aH\xca\x41\x14redis.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformBs\n\x19\x63om.google.cloud.redis.v1B\x18\x43loudRedisServiceV1ProtoP\x01Z:google.golang.org/genproto/googleapis/cloud/redis/v1;redisb\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, + google_dot_api_dot_resource__pb2.DESCRIPTOR, google_dot_longrunning_dot_operations__pb2.DESCRIPTOR, google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - google_dot_api_dot_client__pb2.DESCRIPTOR, ], ) @@ -84,8 +88,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=942, - serialized_end=1090, + serialized_start=1082, + serialized_end=1230, ) _sym_db.RegisterEnumDescriptor(_INSTANCE_STATE) @@ -111,8 +115,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=1092, - serialized_end=1148, + serialized_start=1232, + serialized_end=1288, ) _sym_db.RegisterEnumDescriptor(_INSTANCE_TIER) @@ -146,8 +150,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=2235, - serialized_end=2337, + 
serialized_start=2733, + serialized_end=2835, ) _sym_db.RegisterEnumDescriptor(_FAILOVERINSTANCEREQUEST_DATAPROTECTIONMODE) @@ -204,8 +208,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=841, - serialized_end=886, + serialized_start=981, + serialized_end=1026, ) _INSTANCE_REDISCONFIGSENTRY = _descriptor.Descriptor( @@ -260,8 +264,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=888, - serialized_end=939, + serialized_start=1028, + serialized_end=1079, ) _INSTANCE = _descriptor.Descriptor( @@ -286,7 +290,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -340,7 +344,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -358,7 +362,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -376,7 +380,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -394,7 +398,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -412,7 +416,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -430,7 +434,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -448,7 +452,7 @@ containing_type=None, is_extension=False, 
extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -466,7 +470,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -484,7 +488,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -502,7 +506,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -520,7 +524,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -538,7 +542,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -556,7 +560,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -574,7 +578,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -592,20 +596,22 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], extensions=[], nested_types=[_INSTANCE_LABELSENTRY, _INSTANCE_REDISCONFIGSENTRY], enum_types=[_INSTANCE_STATE, _INSTANCE_TIER], - serialized_options=None, + serialized_options=_b( + "\352A]\n\035redis.googleapis.com/Instance\022>> >>> client = redis_v1beta1.CloudRedisClient() >>> - >>> name = 
client.instance_path('[PROJECT]', '[LOCATION]', '[INSTANCE]') + >>> # TODO: Initialize `name`: + >>> name = '' >>> >>> # TODO: Initialize `input_config`: >>> input_config = {} @@ -770,7 +771,8 @@ def export_instance( >>> >>> client = redis_v1beta1.CloudRedisClient() >>> - >>> name = client.instance_path('[PROJECT]', '[LOCATION]', '[INSTANCE]') + >>> # TODO: Initialize `name`: + >>> name = '' >>> >>> # TODO: Initialize `output_config`: >>> output_config = {} @@ -853,7 +855,7 @@ def export_instance( def failover_instance( self, name, - data_protection_mode, + data_protection_mode=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, @@ -864,16 +866,12 @@ def failover_instance( Example: >>> from google.cloud import redis_v1beta1 - >>> from google.cloud.redis_v1beta1 import enums >>> >>> client = redis_v1beta1.CloudRedisClient() >>> >>> name = client.instance_path('[PROJECT]', '[LOCATION]', '[INSTANCE]') >>> - >>> # TODO: Initialize `data_protection_mode`: - >>> data_protection_mode = enums.FailoverInstanceRequest.DataProtectionMode.DATA_PROTECTION_MODE_UNSPECIFIED - >>> - >>> response = client.failover_instance(name, data_protection_mode) + >>> response = client.failover_instance(name) >>> >>> def callback(operation_future): ... # Handle result. 
diff --git a/redis/google/cloud/redis_v1beta1/proto/cloud_redis.proto b/redis/google/cloud/redis_v1beta1/proto/cloud_redis.proto index 699df83fa89a..eafd8eb4ea86 100644 --- a/redis/google/cloud/redis_v1beta1/proto/cloud_redis.proto +++ b/redis/google/cloud/redis_v1beta1/proto/cloud_redis.proto @@ -18,10 +18,12 @@ syntax = "proto3"; package google.cloud.redis.v1beta1; import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; import "google/longrunning/operations.proto"; import "google/protobuf/field_mask.proto"; import "google/protobuf/timestamp.proto"; -import "google/api/client.proto"; option go_package = "google.golang.org/genproto/googleapis/cloud/redis/v1beta1;redis"; option java_multiple_files = true; @@ -45,6 +47,7 @@ option java_package = "com.google.cloud.redis.v1beta1"; // * `projects/redpepper-1290/locations/us-central1/instances/my-redis` service CloudRedis { option (google.api.default_host) = "redis.googleapis.com"; + option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; // Lists all Redis instances owned by a project in either the specified // location (region) or all locations. @@ -58,6 +61,7 @@ service CloudRedis { option (google.api.http) = { get: "/v1beta1/{parent=projects/*/locations/*}/instances" }; + option (google.api.method_signature) = "parent"; } // Gets the details of a specific Redis instance. @@ -65,6 +69,7 @@ service CloudRedis { option (google.api.http) = { get: "/v1beta1/{name=projects/*/locations/*/instances/*}" }; + option (google.api.method_signature) = "name"; } // Creates a Redis instance based on the specified tier and memory size. 
@@ -84,6 +89,11 @@ service CloudRedis { post: "/v1beta1/{parent=projects/*/locations/*}/instances" body: "instance" }; + option (google.api.method_signature) = "parent,instance_id,instance"; + option (google.longrunning.operation_info) = { + response_type: "google.cloud.redis.v1beta1.Instance" + metadata_type: "google.protobuf.Any" + }; } // Updates the metadata and configuration of a specific Redis instance. @@ -96,6 +106,11 @@ service CloudRedis { patch: "/v1beta1/{instance.name=projects/*/locations/*/instances/*}" body: "instance" }; + option (google.api.method_signature) = "update_mask,instance"; + option (google.longrunning.operation_info) = { + response_type: "google.cloud.redis.v1beta1.Instance" + metadata_type: "google.protobuf.Any" + }; } // Import a Redis RDB snapshot file from Cloud Storage into a Redis instance. @@ -111,6 +126,11 @@ service CloudRedis { post: "/v1beta1/{name=projects/*/locations/*/instances/*}:import" body: "*" }; + option (google.api.method_signature) = "name,input_config"; + option (google.longrunning.operation_info) = { + response_type: "google.cloud.redis.v1beta1.Instance" + metadata_type: "google.protobuf.Any" + }; } // Export Redis instance data into a Redis RDB format file in Cloud Storage. 
@@ -124,6 +144,11 @@ service CloudRedis { post: "/v1beta1/{name=projects/*/locations/*/instances/*}:export" body: "*" }; + option (google.api.method_signature) = "name,output_config"; + option (google.longrunning.operation_info) = { + response_type: "google.cloud.redis.v1beta1.Instance" + metadata_type: "google.protobuf.Any" + }; } // Initiates a failover of the master node to current replica node for a @@ -133,6 +158,11 @@ service CloudRedis { post: "/v1beta1/{name=projects/*/locations/*/instances/*}:failover" body: "*" }; + option (google.api.method_signature) = "name,data_protection_mode"; + option (google.longrunning.operation_info) = { + response_type: "google.cloud.redis.v1beta1.Instance" + metadata_type: "google.protobuf.Any" + }; } // Deletes a specific Redis instance. Instance stops serving and data is @@ -141,11 +171,21 @@ service CloudRedis { option (google.api.http) = { delete: "/v1beta1/{name=projects/*/locations/*/instances/*}" }; + option (google.api.method_signature) = "name"; + option (google.longrunning.operation_info) = { + response_type: "google.protobuf.Empty" + metadata_type: "google.protobuf.Any" + }; } } // A Google Cloud Redis instance. message Instance { + option (google.api.resource) = { + type: "redis.googleapis.com/Instance" + pattern: "projects/{project}/locations/{location}/instances/{instance}" + }; + // Represents the different states of a Redis instance. enum State { // Not set. @@ -199,7 +239,7 @@ message Instance { // specific zone (or collection of zones for cross-zone instances) an instance // should be provisioned in. Refer to [location_id] and // [alternative_location_id] fields for more details. - string name = 1; + string name = 1 [(google.api.field_behavior) = REQUIRED]; // An arbitrary and optional user-provided name for the instance. string display_name = 2; @@ -212,12 +252,12 @@ message Instance { // instances will be created across two zones for protection against zonal // failures. 
If [alternative_location_id] is also provided, it must be // different from [location_id]. - string location_id = 4; + string location_id = 4 [(google.api.field_behavior) = OPTIONAL]; // Optional. Only applicable to STANDARD_HA tier which protects the instance // against zonal failures by provisioning it across two zones. If provided, it // must be a different zone from the one provided in [location_id]. - string alternative_location_id = 5; + string alternative_location_id = 5 [(google.api.field_behavior) = OPTIONAL]; // Optional. The version of Redis software. // If not provided, latest supported version will be used. Updating the @@ -226,37 +266,37 @@ message Instance { // // * `REDIS_4_0` for Redis 4.0 compatibility (default) // * `REDIS_3_2` for Redis 3.2 compatibility - string redis_version = 7; + string redis_version = 7 [(google.api.field_behavior) = OPTIONAL]; // Optional. The CIDR range of internal addresses that are reserved for this // instance. If not provided, the service will choose an unused /29 block, // for example, 10.0.0.0/29 or 192.168.0.0/29. Ranges must be unique // and non-overlapping with existing subnets in an authorized network. - string reserved_ip_range = 9; + string reserved_ip_range = 9 [(google.api.field_behavior) = OPTIONAL]; // Output only. Hostname or IP address of the exposed Redis endpoint used by // clients to connect to the service. - string host = 10; + string host = 10 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The port number of the exposed Redis endpoint. - int32 port = 11; + int32 port = 11 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The current zone where the Redis endpoint is placed. For Basic // Tier instances, this will always be the same as the [location_id] // provided by the user at creation time. For Standard Tier instances, // this can be either [location_id] or [alternative_location_id] and can // change after a failover event. 
- string current_location_id = 12; + string current_location_id = 12 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The time the instance was created. - google.protobuf.Timestamp create_time = 13; + google.protobuf.Timestamp create_time = 13 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The current state of this instance. - State state = 14; + State state = 14 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Additional information about the current status of this // instance, if available. - string status_message = 15; + string status_message = 15 [(google.api.field_behavior) = OUTPUT_ONLY]; // Optional. Redis configuration parameters, according to // http://redis.io/topics/config. Currently, the only supported parameters @@ -272,26 +312,26 @@ message Instance { // * activedefrag // * lfu-log-factor // * lfu-decay-time - map redis_configs = 16; + map redis_configs = 16 [(google.api.field_behavior) = OPTIONAL]; // Required. The service tier of the instance. - Tier tier = 17; + Tier tier = 17 [(google.api.field_behavior) = REQUIRED]; // Required. Redis memory size in GiB. - int32 memory_size_gb = 18; + int32 memory_size_gb = 18 [(google.api.field_behavior) = REQUIRED]; // Optional. The full name of the Google Compute Engine // [network](/compute/docs/networks-and-firewalls#networks) to which the // instance is connected. If left unspecified, the `default` network // will be used. - string authorized_network = 20; + string authorized_network = 20 [(google.api.field_behavior) = OPTIONAL]; // Output only. Cloud IAM identity used by import / export operations to // transfer data to/from Cloud Storage. Format is // "serviceAccount:". The value may change over time // for a given instance so should be checked before each import/export // operation. 
- string persistence_iam_identity = 21; + string persistence_iam_identity = 21 [(google.api.field_behavior) = OUTPUT_ONLY]; } // Request for [ListInstances][google.cloud.redis.v1beta1.CloudRedis.ListInstances]. @@ -299,7 +339,12 @@ message ListInstancesRequest { // Required. The resource name of the instance location using the form: // `projects/{project_id}/locations/{location_id}` // where `location_id` refers to a GCP region. - string parent = 1; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; // The maximum number of items to return. // @@ -342,7 +387,12 @@ message GetInstanceRequest { // Required. Redis instance resource name using the form: // `projects/{project_id}/locations/{location_id}/instances/{instance_id}` // where `location_id` refers to a GCP region. - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "redis.googleapis.com/Instance" + } + ]; } // Request for [CreateInstance][google.cloud.redis.v1beta1.CloudRedis.CreateInstance]. @@ -350,7 +400,12 @@ message CreateInstanceRequest { // Required. The resource name of the instance location using the form: // `projects/{project_id}/locations/{location_id}` // where `location_id` refers to a GCP region. - string parent = 1; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; // Required. The logical name of the Redis instance in the customer project // with the following restrictions: @@ -360,10 +415,10 @@ message CreateInstanceRequest { // * Must be between 1-40 characters. // * Must end with a number or a letter. // * Must be unique within the customer project / location - string instance_id = 2; + string instance_id = 2 [(google.api.field_behavior) = REQUIRED]; // Required. 
A Redis [Instance] resource - Instance instance = 3; + Instance instance = 3 [(google.api.field_behavior) = REQUIRED]; } // Request for [UpdateInstance][google.cloud.redis.v1beta1.CloudRedis.UpdateInstance]. @@ -376,11 +431,11 @@ message UpdateInstanceRequest { // * `labels` // * `memorySizeGb` // * `redisConfig` - google.protobuf.FieldMask update_mask = 1; + google.protobuf.FieldMask update_mask = 1 [(google.api.field_behavior) = REQUIRED]; // Required. Update description. // Only fields specified in update_mask are updated. - Instance instance = 2; + Instance instance = 2 [(google.api.field_behavior) = REQUIRED]; } // Request for [DeleteInstance][google.cloud.redis.v1beta1.CloudRedis.DeleteInstance]. @@ -388,13 +443,18 @@ message DeleteInstanceRequest { // Required. Redis instance resource name using the form: // `projects/{project_id}/locations/{location_id}/instances/{instance_id}` // where `location_id` refers to a GCP region. - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "redis.googleapis.com/Instance" + } + ]; } // The Cloud Storage location for the input content message GcsSource { // Required. Source data URI. (e.g. 'gs://my_bucket/my_object'). - string uri = 1; + string uri = 1 [(google.api.field_behavior) = REQUIRED]; } // The input content @@ -411,17 +471,17 @@ message ImportInstanceRequest { // Required. Redis instance resource name using the form: // `projects/{project_id}/locations/{location_id}/instances/{instance_id}` // where `location_id` refers to a GCP region. - string name = 1; + string name = 1 [(google.api.field_behavior) = REQUIRED]; // Required. Specify data to be imported. - InputConfig input_config = 3; + InputConfig input_config = 3 [(google.api.field_behavior) = REQUIRED]; } // The Cloud Storage location for the output content message GcsDestination { // Required. Data destination URI (e.g. // 'gs://my_bucket/my_object'). 
Existing files will be overwritten. - string uri = 1; + string uri = 1 [(google.api.field_behavior) = REQUIRED]; } // The output content @@ -438,10 +498,10 @@ message ExportInstanceRequest { // Required. Redis instance resource name using the form: // `projects/{project_id}/locations/{location_id}/instances/{instance_id}` // where `location_id` refers to a GCP region. - string name = 1; + string name = 1 [(google.api.field_behavior) = REQUIRED]; // Required. Specify data to be exported. - OutputConfig output_config = 3; + OutputConfig output_config = 3 [(google.api.field_behavior) = REQUIRED]; } // Request for [Failover][google.cloud.redis.v1beta1.CloudRedis.FailoverInstance]. @@ -464,11 +524,16 @@ message FailoverInstanceRequest { // Required. Redis instance resource name using the form: // `projects/{project_id}/locations/{location_id}/instances/{instance_id}` // where `location_id` refers to a GCP region. - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "redis.googleapis.com/Instance" + } + ]; // Optional. Available data protection modes that the user can choose. If it's // unspecified, data protection mode will be LIMITED_DATA_LOSS by default. - DataProtectionMode data_protection_mode = 2; + DataProtectionMode data_protection_mode = 2 [(google.api.field_behavior) = OPTIONAL]; } // This location metadata represents additional configuration options for a @@ -480,7 +545,7 @@ message LocationMetadata { // by the lowercase ID of each zone, as defined by GCE. These keys can be // specified in `location_id` or `alternative_location_id` fields when // creating a Redis instance. - map available_zones = 1; + map available_zones = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; } // Defines specific information for a particular zone. 
Currently empty and diff --git a/redis/google/cloud/redis_v1beta1/proto/cloud_redis_pb2.py b/redis/google/cloud/redis_v1beta1/proto/cloud_redis_pb2.py index 58869537cef1..24980450c6ec 100644 --- a/redis/google/cloud/redis_v1beta1/proto/cloud_redis_pb2.py +++ b/redis/google/cloud/redis_v1beta1/proto/cloud_redis_pb2.py @@ -16,12 +16,14 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.longrunning import ( operations_pb2 as google_dot_longrunning_dot_operations__pb2, ) from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from google.api import client_pb2 as google_dot_api_dot_client__pb2 DESCRIPTOR = _descriptor.FileDescriptor( @@ -32,14 +34,16 @@ "\n\036com.google.cloud.redis.v1beta1B\032CloudRedisServiceBetaProtoP\001Z?google.golang.org/genproto/googleapis/cloud/redis/v1beta1;redis" ), serialized_pb=_b( - '\n2google/cloud/redis_v1beta1/proto/cloud_redis.proto\x12\x1agoogle.cloud.redis.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a#google/longrunning/operations.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/api/client.proto"\xa8\x07\n\x08Instance\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12@\n\x06labels\x18\x03 \x03(\x0b\x32\x30.google.cloud.redis.v1beta1.Instance.LabelsEntry\x12\x13\n\x0blocation_id\x18\x04 \x01(\t\x12\x1f\n\x17\x61lternative_location_id\x18\x05 \x01(\t\x12\x15\n\rredis_version\x18\x07 \x01(\t\x12\x19\n\x11reserved_ip_range\x18\t \x01(\t\x12\x0c\n\x04host\x18\n \x01(\t\x12\x0c\n\x04port\x18\x0b \x01(\x05\x12\x1b\n\x13\x63urrent_location_id\x18\x0c 
\x01(\t\x12/\n\x0b\x63reate_time\x18\r \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x39\n\x05state\x18\x0e \x01(\x0e\x32*.google.cloud.redis.v1beta1.Instance.State\x12\x16\n\x0estatus_message\x18\x0f \x01(\t\x12M\n\rredis_configs\x18\x10 \x03(\x0b\x32\x36.google.cloud.redis.v1beta1.Instance.RedisConfigsEntry\x12\x37\n\x04tier\x18\x11 \x01(\x0e\x32).google.cloud.redis.v1beta1.Instance.Tier\x12\x16\n\x0ememory_size_gb\x18\x12 \x01(\x05\x12\x1a\n\x12\x61uthorized_network\x18\x14 \x01(\t\x12 \n\x18persistence_iam_identity\x18\x15 \x01(\t\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x33\n\x11RedisConfigsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x94\x01\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0c\n\x08\x43REATING\x10\x01\x12\t\n\x05READY\x10\x02\x12\x0c\n\x08UPDATING\x10\x03\x12\x0c\n\x08\x44\x45LETING\x10\x04\x12\r\n\tREPAIRING\x10\x05\x12\x0f\n\x0bMAINTENANCE\x10\x06\x12\r\n\tIMPORTING\x10\x08\x12\x10\n\x0c\x46\x41ILING_OVER\x10\n"8\n\x04Tier\x12\x14\n\x10TIER_UNSPECIFIED\x10\x00\x12\t\n\x05\x42\x41SIC\x10\x01\x12\x0f\n\x0bSTANDARD_HA\x10\x03"M\n\x14ListInstancesRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"~\n\x15ListInstancesResponse\x12\x37\n\tinstances\x18\x01 \x03(\x0b\x32$.google.cloud.redis.v1beta1.Instance\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\x12\x13\n\x0bunreachable\x18\x03 \x03(\t""\n\x12GetInstanceRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"t\n\x15\x43reateInstanceRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x13\n\x0binstance_id\x18\x02 \x01(\t\x12\x36\n\x08instance\x18\x03 \x01(\x0b\x32$.google.cloud.redis.v1beta1.Instance"\x80\x01\n\x15UpdateInstanceRequest\x12/\n\x0bupdate_mask\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\x12\x36\n\x08instance\x18\x02 
\x01(\x0b\x32$.google.cloud.redis.v1beta1.Instance"%\n\x15\x44\x65leteInstanceRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"\x18\n\tGcsSource\x12\x0b\n\x03uri\x18\x01 \x01(\t"T\n\x0bInputConfig\x12;\n\ngcs_source\x18\x01 \x01(\x0b\x32%.google.cloud.redis.v1beta1.GcsSourceH\x00\x42\x08\n\x06source"d\n\x15ImportInstanceRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12=\n\x0cinput_config\x18\x03 \x01(\x0b\x32\'.google.cloud.redis.v1beta1.InputConfig"\x1d\n\x0eGcsDestination\x12\x0b\n\x03uri\x18\x01 \x01(\t"d\n\x0cOutputConfig\x12\x45\n\x0fgcs_destination\x18\x01 \x01(\x0b\x32*.google.cloud.redis.v1beta1.GcsDestinationH\x00\x42\r\n\x0b\x64\x65stination"f\n\x15\x45xportInstanceRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12?\n\routput_config\x18\x03 \x01(\x0b\x32(.google.cloud.redis.v1beta1.OutputConfig"\xf5\x01\n\x17\x46\x61iloverInstanceRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x64\n\x14\x64\x61ta_protection_mode\x18\x02 \x01(\x0e\x32\x46.google.cloud.redis.v1beta1.FailoverInstanceRequest.DataProtectionMode"f\n\x12\x44\x61taProtectionMode\x12$\n DATA_PROTECTION_MODE_UNSPECIFIED\x10\x00\x12\x15\n\x11LIMITED_DATA_LOSS\x10\x01\x12\x13\n\x0f\x46ORCE_DATA_LOSS\x10\x02"\xce\x01\n\x10LocationMetadata\x12Y\n\x0f\x61vailable_zones\x18\x01 \x03(\x0b\x32@.google.cloud.redis.v1beta1.LocationMetadata.AvailableZonesEntry\x1a_\n\x13\x41vailableZonesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x37\n\x05value\x18\x02 
\x01(\x0b\x32(.google.cloud.redis.v1beta1.ZoneMetadata:\x02\x38\x01"\x0e\n\x0cZoneMetadata2\x81\x0b\n\nCloudRedis\x12\xb0\x01\n\rListInstances\x12\x30.google.cloud.redis.v1beta1.ListInstancesRequest\x1a\x31.google.cloud.redis.v1beta1.ListInstancesResponse":\x82\xd3\xe4\x93\x02\x34\x12\x32/v1beta1/{parent=projects/*/locations/*}/instances\x12\x9f\x01\n\x0bGetInstance\x12..google.cloud.redis.v1beta1.GetInstanceRequest\x1a$.google.cloud.redis.v1beta1.Instance":\x82\xd3\xe4\x93\x02\x34\x12\x32/v1beta1/{name=projects/*/locations/*/instances/*}\x12\xa8\x01\n\x0e\x43reateInstance\x12\x31.google.cloud.redis.v1beta1.CreateInstanceRequest\x1a\x1d.google.longrunning.Operation"D\x82\xd3\xe4\x93\x02>"2/v1beta1/{parent=projects/*/locations/*}/instances:\x08instance\x12\xb1\x01\n\x0eUpdateInstance\x12\x31.google.cloud.redis.v1beta1.UpdateInstanceRequest\x1a\x1d.google.longrunning.Operation"M\x82\xd3\xe4\x93\x02G2;/v1beta1/{instance.name=projects/*/locations/*/instances/*}:\x08instance\x12\xa8\x01\n\x0eImportInstance\x12\x31.google.cloud.redis.v1beta1.ImportInstanceRequest\x1a\x1d.google.longrunning.Operation"D\x82\xd3\xe4\x93\x02>"9/v1beta1/{name=projects/*/locations/*/instances/*}:import:\x01*\x12\xa8\x01\n\x0e\x45xportInstance\x12\x31.google.cloud.redis.v1beta1.ExportInstanceRequest\x1a\x1d.google.longrunning.Operation"D\x82\xd3\xe4\x93\x02>"9/v1beta1/{name=projects/*/locations/*/instances/*}:export:\x01*\x12\xae\x01\n\x10\x46\x61iloverInstance\x12\x33.google.cloud.redis.v1beta1.FailoverInstanceRequest\x1a\x1d.google.longrunning.Operation"F\x82\xd3\xe4\x93\x02@";/v1beta1/{name=projects/*/locations/*/instances/*}:failover:\x01*\x12\x9e\x01\n\x0e\x44\x65leteInstance\x12\x31.google.cloud.redis.v1beta1.DeleteInstanceRequest\x1a\x1d.google.longrunning.Operation":\x82\xd3\xe4\x93\x02\x34*2/v1beta1/{name=projects/*/locations/*/instances/*}\x1a\x17\xca\x41\x14redis.googleapis.comB\x7f\n\x1e\x63om.google.cloud.redis.v1beta1B\x1a\x43loudRedisServiceBetaProtoP\x01Z?google.golang.org/genpro
to/googleapis/cloud/redis/v1beta1;redisb\x06proto3' + '\n2google/cloud/redis_v1beta1/proto/cloud_redis.proto\x12\x1agoogle.cloud.redis.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a#google/longrunning/operations.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xda\x08\n\x08Instance\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12@\n\x06labels\x18\x03 \x03(\x0b\x32\x30.google.cloud.redis.v1beta1.Instance.LabelsEntry\x12\x18\n\x0blocation_id\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12$\n\x17\x61lternative_location_id\x18\x05 \x01(\tB\x03\xe0\x41\x01\x12\x1a\n\rredis_version\x18\x07 \x01(\tB\x03\xe0\x41\x01\x12\x1e\n\x11reserved_ip_range\x18\t \x01(\tB\x03\xe0\x41\x01\x12\x11\n\x04host\x18\n \x01(\tB\x03\xe0\x41\x03\x12\x11\n\x04port\x18\x0b \x01(\x05\x42\x03\xe0\x41\x03\x12 \n\x13\x63urrent_location_id\x18\x0c \x01(\tB\x03\xe0\x41\x03\x12\x34\n\x0b\x63reate_time\x18\r \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12>\n\x05state\x18\x0e \x01(\x0e\x32*.google.cloud.redis.v1beta1.Instance.StateB\x03\xe0\x41\x03\x12\x1b\n\x0estatus_message\x18\x0f \x01(\tB\x03\xe0\x41\x03\x12R\n\rredis_configs\x18\x10 \x03(\x0b\x32\x36.google.cloud.redis.v1beta1.Instance.RedisConfigsEntryB\x03\xe0\x41\x01\x12<\n\x04tier\x18\x11 \x01(\x0e\x32).google.cloud.redis.v1beta1.Instance.TierB\x03\xe0\x41\x02\x12\x1b\n\x0ememory_size_gb\x18\x12 \x01(\x05\x42\x03\xe0\x41\x02\x12\x1f\n\x12\x61uthorized_network\x18\x14 \x01(\tB\x03\xe0\x41\x01\x12%\n\x18persistence_iam_identity\x18\x15 \x01(\tB\x03\xe0\x41\x03\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x33\n\x11RedisConfigsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01"\x94\x01\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0c\n\x08\x43REATING\x10\x01\x12\t\n\x05READY\x10\x02\x12\x0c\n\x08UPDATING\x10\x03\x12\x0c\n\x08\x44\x45LETING\x10\x04\x12\r\n\tREPAIRING\x10\x05\x12\x0f\n\x0bMAINTENANCE\x10\x06\x12\r\n\tIMPORTING\x10\x08\x12\x10\n\x0c\x46\x41ILING_OVER\x10\n"8\n\x04Tier\x12\x14\n\x10TIER_UNSPECIFIED\x10\x00\x12\t\n\x05\x42\x41SIC\x10\x01\x12\x0f\n\x0bSTANDARD_HA\x10\x03:`\xea\x41]\n\x1dredis.googleapis.com/Instance\x12"2/v1beta1/{parent=projects/*/locations/*}/instances:\x08instance\xda\x41\x1bparent,instance_id,instance\xca\x41:\n#google.cloud.redis.v1beta1.Instance\x12\x13google.protobuf.Any\x12\x86\x02\n\x0eUpdateInstance\x12\x31.google.cloud.redis.v1beta1.UpdateInstanceRequest\x1a\x1d.google.longrunning.Operation"\xa1\x01\x82\xd3\xe4\x93\x02G2;/v1beta1/{instance.name=projects/*/locations/*/instances/*}:\x08instance\xda\x41\x14update_mask,instance\xca\x41:\n#google.cloud.redis.v1beta1.Instance\x12\x13google.protobuf.Any\x12\xfa\x01\n\x0eImportInstance\x12\x31.google.cloud.redis.v1beta1.ImportInstanceRequest\x1a\x1d.google.longrunning.Operation"\x95\x01\x82\xd3\xe4\x93\x02>"9/v1beta1/{name=projects/*/locations/*/instances/*}:import:\x01*\xda\x41\x11name,input_config\xca\x41:\n#google.cloud.redis.v1beta1.Instance\x12\x13google.protobuf.Any\x12\xfb\x01\n\x0e\x45xportInstance\x12\x31.google.cloud.redis.v1beta1.ExportInstanceRequest\x1a\x1d.google.longrunning.Operation"\x96\x01\x82\xd3\xe4\x93\x02>"9/v1beta1/{name=projects/*/locations/*/instances/*}:export:\x01*\xda\x41\x12name,output_config\xca\x41:\n#google.cloud.redis.v1beta1.Instance\x12\x13google.protobuf.Any\x12\x88\x02\n\x10\x46\x61iloverInstance\x12\x33.google.cloud.redis.v1beta1.FailoverInstanceRequest\x1a\x1d.google.longrunning.Operation"\x9f\x01\x82\xd3\xe4\x93\x02@";/v1beta1/{name=projects/*/locations/*/instances/*}:failover:\x01*\xda\x41\x19name,data_protection_mode\xca\x41:\n#google.cloud.redis.v1beta1.Instance\x12\x13google.protobuf.A
ny\x12\xd4\x01\n\x0e\x44\x65leteInstance\x12\x31.google.cloud.redis.v1beta1.DeleteInstanceRequest\x1a\x1d.google.longrunning.Operation"p\x82\xd3\xe4\x93\x02\x34*2/v1beta1/{name=projects/*/locations/*/instances/*}\xda\x41\x04name\xca\x41,\n\x15google.protobuf.Empty\x12\x13google.protobuf.Any\x1aH\xca\x41\x14redis.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB\x7f\n\x1e\x63om.google.cloud.redis.v1beta1B\x1a\x43loudRedisServiceBetaProtoP\x01Z?google.golang.org/genproto/googleapis/cloud/redis/v1beta1;redisb\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, + google_dot_api_dot_resource__pb2.DESCRIPTOR, google_dot_longrunning_dot_operations__pb2.DESCRIPTOR, google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - google_dot_api_dot_client__pb2.DESCRIPTOR, ], ) @@ -84,8 +88,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=972, - serialized_end=1120, + serialized_start=1112, + serialized_end=1260, ) _sym_db.RegisterEnumDescriptor(_INSTANCE_STATE) @@ -111,8 +115,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=1122, - serialized_end=1178, + serialized_start=1262, + serialized_end=1318, ) _sym_db.RegisterEnumDescriptor(_INSTANCE_TIER) @@ -146,8 +150,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=2306, - serialized_end=2408, + serialized_start=2803, + serialized_end=2905, ) _sym_db.RegisterEnumDescriptor(_FAILOVERINSTANCEREQUEST_DATAPROTECTIONMODE) @@ -204,8 +208,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=871, - serialized_end=916, + serialized_start=1011, + serialized_end=1056, ) _INSTANCE_REDISCONFIGSENTRY = _descriptor.Descriptor( @@ -260,8 +264,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=918, - serialized_end=969, + serialized_start=1058, + 
serialized_end=1109, ) _INSTANCE = _descriptor.Descriptor( @@ -286,7 +290,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -340,7 +344,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -358,7 +362,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -376,7 +380,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -394,7 +398,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -412,7 +416,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -430,7 +434,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -448,7 +452,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -466,7 +470,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -484,7 +488,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), 
file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -502,7 +506,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -520,7 +524,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -538,7 +542,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -556,7 +560,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -574,7 +578,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -592,20 +596,22 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], extensions=[], nested_types=[_INSTANCE_LABELSENTRY, _INSTANCE_REDISCONFIGSENTRY], enum_types=[_INSTANCE_STATE, _INSTANCE_TIER], - serialized_options=None, + serialized_options=_b( + "\352A]\n\035redis.googleapis.com/Instance\022"2/v1beta1/{parent=projects/*/locations/*}/instances:\010instance' + '\202\323\344\223\002>"2/v1beta1/{parent=projects/*/locations/*}/instances:\010instance\332A\033parent,instance_id,instance\312A:\n#google.cloud.redis.v1beta1.Instance\022\023google.protobuf.Any' ), ), _descriptor.MethodDescriptor( @@ -2021,7 +2073,7 @@ input_type=_UPDATEINSTANCEREQUEST, output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, serialized_options=_b( - "\202\323\344\223\002G2;/v1beta1/{instance.name=projects/*/locations/*/instances/*}:\010instance" + 
"\202\323\344\223\002G2;/v1beta1/{instance.name=projects/*/locations/*/instances/*}:\010instance\332A\024update_mask,instance\312A:\n#google.cloud.redis.v1beta1.Instance\022\023google.protobuf.Any" ), ), _descriptor.MethodDescriptor( @@ -2032,7 +2084,7 @@ input_type=_IMPORTINSTANCEREQUEST, output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, serialized_options=_b( - '\202\323\344\223\002>"9/v1beta1/{name=projects/*/locations/*/instances/*}:import:\001*' + '\202\323\344\223\002>"9/v1beta1/{name=projects/*/locations/*/instances/*}:import:\001*\332A\021name,input_config\312A:\n#google.cloud.redis.v1beta1.Instance\022\023google.protobuf.Any' ), ), _descriptor.MethodDescriptor( @@ -2043,7 +2095,7 @@ input_type=_EXPORTINSTANCEREQUEST, output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, serialized_options=_b( - '\202\323\344\223\002>"9/v1beta1/{name=projects/*/locations/*/instances/*}:export:\001*' + '\202\323\344\223\002>"9/v1beta1/{name=projects/*/locations/*/instances/*}:export:\001*\332A\022name,output_config\312A:\n#google.cloud.redis.v1beta1.Instance\022\023google.protobuf.Any' ), ), _descriptor.MethodDescriptor( @@ -2054,7 +2106,7 @@ input_type=_FAILOVERINSTANCEREQUEST, output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, serialized_options=_b( - '\202\323\344\223\002@";/v1beta1/{name=projects/*/locations/*/instances/*}:failover:\001*' + '\202\323\344\223\002@";/v1beta1/{name=projects/*/locations/*/instances/*}:failover:\001*\332A\031name,data_protection_mode\312A:\n#google.cloud.redis.v1beta1.Instance\022\023google.protobuf.Any' ), ), _descriptor.MethodDescriptor( @@ -2065,7 +2117,7 @@ input_type=_DELETEINSTANCEREQUEST, output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, serialized_options=_b( - "\202\323\344\223\0024*2/v1beta1/{name=projects/*/locations/*/instances/*}" + 
"\202\323\344\223\0024*2/v1beta1/{name=projects/*/locations/*/instances/*}\332A\004name\312A,\n\025google.protobuf.Empty\022\023google.protobuf.Any" ), ), ], diff --git a/redis/synth.metadata b/redis/synth.metadata index d3e5f77ab007..1a7e4f97ab61 100644 --- a/redis/synth.metadata +++ b/redis/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-08-06T12:35:18.058175Z", + "updateTime": "2019-10-08T12:31:15.017267Z", "sources": [ { "generator": { "name": "artman", - "version": "0.32.1", - "dockerImage": "googleapis/artman@sha256:a684d40ba9a4e15946f5f2ca6b4bd9fe301192f522e9de4fff622118775f309b" + "version": "0.38.0", + "dockerImage": "googleapis/artman@sha256:0d2f8d429110aeb8d82df6550ef4ede59d40df9062d260a1580fce688b0512bf" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "e699b0cba64ffddfae39633417180f1f65875896", - "internalRef": "261759677" + "sha": "122bdbf877ad87439f8dd9d1474a8e5dde188087", + "internalRef": "273381131" } }, { diff --git a/redis/tests/unit/gapic/v1/test_cloud_redis_client_v1.py b/redis/tests/unit/gapic/v1/test_cloud_redis_client_v1.py index b2c85652f882..b2ddde330d54 100644 --- a/redis/tests/unit/gapic/v1/test_cloud_redis_client_v1.py +++ b/redis/tests/unit/gapic/v1/test_cloud_redis_client_v1.py @@ -308,7 +308,7 @@ def test_update_instance(self): paths_element_2 = "memory_size_gb" paths = [paths_element, paths_element_2] update_mask = {"paths": paths} - display_name = " instance.memory_size_gb=4" + display_name = " instance.memory_size_gb=4" instance = {"display_name": display_name} response = client.update_instance(update_mask, instance) @@ -342,7 +342,7 @@ def test_update_instance_exception(self): paths_element_2 = "memory_size_gb" paths = [paths_element, paths_element_2] update_mask = {"paths": paths} - display_name = " instance.memory_size_gb=4" + display_name = " instance.memory_size_gb=4" instance = {"display_name": display_name} response = client.update_instance(update_mask, 
instance) @@ -393,7 +393,7 @@ def test_import_instance(self): client = redis_v1.CloudRedisClient() # Setup Request - name = client.instance_path("[PROJECT]", "[LOCATION]", "[INSTANCE]") + name = "name3373707" input_config = {} response = client.import_instance(name, input_config) @@ -423,7 +423,7 @@ def test_import_instance_exception(self): client = redis_v1.CloudRedisClient() # Setup Request - name = client.instance_path("[PROJECT]", "[LOCATION]", "[INSTANCE]") + name = "name3373707" input_config = {} response = client.import_instance(name, input_config) @@ -474,7 +474,7 @@ def test_export_instance(self): client = redis_v1.CloudRedisClient() # Setup Request - name = client.instance_path("[PROJECT]", "[LOCATION]", "[INSTANCE]") + name = "name3373707" output_config = {} response = client.export_instance(name, output_config) @@ -504,7 +504,7 @@ def test_export_instance_exception(self): client = redis_v1.CloudRedisClient() # Setup Request - name = client.instance_path("[PROJECT]", "[LOCATION]", "[INSTANCE]") + name = "name3373707" output_config = {} response = client.export_instance(name, output_config) @@ -556,18 +556,13 @@ def test_failover_instance(self): # Setup Request name = client.instance_path("[PROJECT]", "[LOCATION]", "[INSTANCE]") - data_protection_mode = ( - enums.FailoverInstanceRequest.DataProtectionMode.DATA_PROTECTION_MODE_UNSPECIFIED - ) - response = client.failover_instance(name, data_protection_mode) + response = client.failover_instance(name) result = response.result() assert expected_response == result assert len(channel.requests) == 1 - expected_request = cloud_redis_pb2.FailoverInstanceRequest( - name=name, data_protection_mode=data_protection_mode - ) + expected_request = cloud_redis_pb2.FailoverInstanceRequest(name=name) actual_request = channel.requests[0][1] assert expected_request == actual_request @@ -588,11 +583,8 @@ def test_failover_instance_exception(self): # Setup Request name = client.instance_path("[PROJECT]", "[LOCATION]", 
"[INSTANCE]") - data_protection_mode = ( - enums.FailoverInstanceRequest.DataProtectionMode.DATA_PROTECTION_MODE_UNSPECIFIED - ) - response = client.failover_instance(name, data_protection_mode) + response = client.failover_instance(name) exception = response.exception() assert exception.errors[0] == error diff --git a/redis/tests/unit/gapic/v1beta1/test_cloud_redis_client_v1beta1.py b/redis/tests/unit/gapic/v1beta1/test_cloud_redis_client_v1beta1.py index 36de8aa567b9..fb7646612152 100644 --- a/redis/tests/unit/gapic/v1beta1/test_cloud_redis_client_v1beta1.py +++ b/redis/tests/unit/gapic/v1beta1/test_cloud_redis_client_v1beta1.py @@ -395,7 +395,7 @@ def test_import_instance(self): client = redis_v1beta1.CloudRedisClient() # Setup Request - name = client.instance_path("[PROJECT]", "[LOCATION]", "[INSTANCE]") + name = "name3373707" input_config = {} response = client.import_instance(name, input_config) @@ -425,7 +425,7 @@ def test_import_instance_exception(self): client = redis_v1beta1.CloudRedisClient() # Setup Request - name = client.instance_path("[PROJECT]", "[LOCATION]", "[INSTANCE]") + name = "name3373707" input_config = {} response = client.import_instance(name, input_config) @@ -476,7 +476,7 @@ def test_export_instance(self): client = redis_v1beta1.CloudRedisClient() # Setup Request - name = client.instance_path("[PROJECT]", "[LOCATION]", "[INSTANCE]") + name = "name3373707" output_config = {} response = client.export_instance(name, output_config) @@ -506,7 +506,7 @@ def test_export_instance_exception(self): client = redis_v1beta1.CloudRedisClient() # Setup Request - name = client.instance_path("[PROJECT]", "[LOCATION]", "[INSTANCE]") + name = "name3373707" output_config = {} response = client.export_instance(name, output_config) @@ -558,18 +558,13 @@ def test_failover_instance(self): # Setup Request name = client.instance_path("[PROJECT]", "[LOCATION]", "[INSTANCE]") - data_protection_mode = ( - 
enums.FailoverInstanceRequest.DataProtectionMode.DATA_PROTECTION_MODE_UNSPECIFIED - ) - response = client.failover_instance(name, data_protection_mode) + response = client.failover_instance(name) result = response.result() assert expected_response == result assert len(channel.requests) == 1 - expected_request = cloud_redis_pb2.FailoverInstanceRequest( - name=name, data_protection_mode=data_protection_mode - ) + expected_request = cloud_redis_pb2.FailoverInstanceRequest(name=name) actual_request = channel.requests[0][1] assert expected_request == actual_request @@ -590,11 +585,8 @@ def test_failover_instance_exception(self): # Setup Request name = client.instance_path("[PROJECT]", "[LOCATION]", "[INSTANCE]") - data_protection_mode = ( - enums.FailoverInstanceRequest.DataProtectionMode.DATA_PROTECTION_MODE_UNSPECIFIED - ) - response = client.failover_instance(name, data_protection_mode) + response = client.failover_instance(name) exception = response.exception() assert exception.errors[0] == error diff --git a/resource_manager/CHANGELOG.md b/resource_manager/CHANGELOG.md index 3cb946992452..20619adfc37d 100644 --- a/resource_manager/CHANGELOG.md +++ b/resource_manager/CHANGELOG.md @@ -4,6 +4,23 @@ [1]: https://pypi.org/project/google-cloud-resource-manager/#history +## 0.30.0 + +10-10-2019 11:38 PDT + + +### New Features +- Add `client_options` support. ([#9043](https://github.com/googleapis/google-cloud-python/pull/9043)) + +### Dependencies +- Pin minimum version of `google-cloud-core` to 1.0.3. ([#9043](https://github.com/googleapis/google-cloud-python/pull/9043)) + +### Documentation +- Fix intersphinx reference to requests. ([#9294](https://github.com/googleapis/google-cloud-python/pull/9294)) +- Remove CI for gh-pages, use googleapis.dev for `api_core` refs. ([#9085](https://github.com/googleapis/google-cloud-python/pull/9085)) +- Remove compatability badges from READMEs. 
([#9035](https://github.com/googleapis/google-cloud-python/pull/9035)) +- Update intersphinx mapping for requests. ([#8805](https://github.com/googleapis/google-cloud-python/pull/8805)) + ## 0.29.2 07-24-2019 17:25 PDT diff --git a/resource_manager/docs/conf.py b/resource_manager/docs/conf.py index 7a7904ebe73a..8b02af66f586 100644 --- a/resource_manager/docs/conf.py +++ b/resource_manager/docs/conf.py @@ -342,7 +342,7 @@ "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://2.python-requests.org/en/master/", None), + "requests": ("https://requests.kennethreitz.org/en/stable/", None), } diff --git a/resource_manager/setup.py b/resource_manager/setup.py index f2f28c680f29..6cc22b82e864 100644 --- a/resource_manager/setup.py +++ b/resource_manager/setup.py @@ -22,7 +22,7 @@ name = 'google-cloud-resource-manager' description = 'Google Cloud Resource Manager API client library' -version = '0.29.2' +version = '0.30.0' # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' diff --git a/runtimeconfig/CHANGELOG.md b/runtimeconfig/CHANGELOG.md index 3c930b633c60..f0438697f676 100644 --- a/runtimeconfig/CHANGELOG.md +++ b/runtimeconfig/CHANGELOG.md @@ -4,6 +4,23 @@ [1]: https://pypi.org/project/google-cloud-runtimeconfig/#history +## 0.30.0 + +10-15-2019 06:53 PDT + + +### New Features +- Add `client_options` to client. ([#9045](https://github.com/googleapis/google-cloud-python/pull/9045)) + +### Dependencies +- Pin 'google-cloud-core >= 1.0.3, < 2.0.0dev'. ([#9445](https://github.com/googleapis/google-cloud-python/pull/9445)) + +### Documentation +- Fix intersphinx reference to requests. ([#9294](https://github.com/googleapis/google-cloud-python/pull/9294)) +- Fix broken links in docs. 
([#9148](https://github.com/googleapis/google-cloud-python/pull/9148)) +- Remove compatability badges from READMEs. ([#9035](https://github.com/googleapis/google-cloud-python/pull/9035)) +- Update intersphinx mapping for requests. ([#8805](https://github.com/googleapis/google-cloud-python/pull/8805)) + ## 0.29.2 07-24-2019 17:26 PDT diff --git a/runtimeconfig/docs/conf.py b/runtimeconfig/docs/conf.py index b745c4631310..e9f8d9b949f3 100644 --- a/runtimeconfig/docs/conf.py +++ b/runtimeconfig/docs/conf.py @@ -342,7 +342,7 @@ "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://2.python-requests.org/en/master/", None), + "requests": ("https://requests.kennethreitz.org/en/stable/", None), } diff --git a/runtimeconfig/setup.py b/runtimeconfig/setup.py index 34b92d567f2e..e519f14b6666 100644 --- a/runtimeconfig/setup.py +++ b/runtimeconfig/setup.py @@ -22,14 +22,14 @@ name = 'google-cloud-runtimeconfig' description = 'Google Cloud RuntimeConfig API client library' -version = '0.29.2' +version = '0.30.0' # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' # 'Development Status :: 5 - Production/Stable' release_status = 'Development Status :: 3 - Alpha' dependencies = [ - "google-cloud-core >= 1.0.0, < 2.0dev", + "google-cloud-core >= 1.0.3, < 2.0dev", ] extras = { } diff --git a/scheduler/docs/conf.py b/scheduler/docs/conf.py index eb6601eacaeb..77d4360534a8 100644 --- a/scheduler/docs/conf.py +++ b/scheduler/docs/conf.py @@ -338,7 +338,7 @@ "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://2.python-requests.org/en/master/", None), + "requests": 
("https://requests.kennethreitz.org/en/stable/", None), "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } diff --git a/scheduler/google/cloud/scheduler_v1/gapic/cloud_scheduler_client.py b/scheduler/google/cloud/scheduler_v1/gapic/cloud_scheduler_client.py index 68a4f3909189..bbedb85b3332 100644 --- a/scheduler/google/cloud/scheduler_v1/gapic/cloud_scheduler_client.py +++ b/scheduler/google/cloud/scheduler_v1/gapic/cloud_scheduler_client.py @@ -98,13 +98,6 @@ def location_path(cls, project, location): location=location, ) - @classmethod - def project_path(cls, project): - """Return a fully-qualified project string.""" - return google.api_core.path_template.expand( - "projects/{project}", project=project - ) - def __init__( self, transport=None, @@ -251,9 +244,7 @@ def list_jobs( ... pass Args: - parent (str): Required. - - The location name. For example: + parent (str): Required. The location name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID``. page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per- @@ -342,9 +333,7 @@ def get_job( >>> response = client.get_job(name) Args: - name (str): Required. - - The job name. For example: + name (str): Required. The job name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will @@ -418,16 +407,12 @@ def create_job( >>> response = client.create_job(parent, job) Args: - parent (str): Required. - - The location name. For example: + parent (str): Required. The location name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID``. - job (Union[dict, ~google.cloud.scheduler_v1.types.Job]): Required. - - The job to add. The user can optionally specify a name for the job in - ``name``. 
``name`` cannot be the same as an existing job. If a name is - not specified then the system will generate a random unique name that - will be returned (``name``) in the response. + job (Union[dict, ~google.cloud.scheduler_v1.types.Job]): Required. The job to add. The user can optionally specify a name for the + job in ``name``. ``name`` cannot be the same as an existing job. If a + name is not specified then the system will generate a random unique name + that will be returned (``name``) in the response. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.scheduler_v1.types.Job` @@ -512,9 +497,7 @@ def update_job( >>> response = client.update_job(job, update_mask) Args: - job (Union[dict, ~google.cloud.scheduler_v1.types.Job]): Required. - - The new job properties. ``name`` must be specified. + job (Union[dict, ~google.cloud.scheduler_v1.types.Job]): Required. The new job properties. ``name`` must be specified. Output only fields cannot be modified using UpdateJob. Any value specified for an output only field will be ignored. @@ -593,9 +576,7 @@ def delete_job( >>> client.delete_job(name) Args: - name (str): Required. - - The job name. For example: + name (str): Required. The job name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will @@ -667,9 +648,7 @@ def pause_job( >>> response = client.pause_job(name) Args: - name (str): Required. - - The job name. For example: + name (str): Required. The job name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will @@ -744,9 +723,7 @@ def resume_job( >>> response = client.resume_job(name) Args: - name (str): Required. - - The job name. For example: + name (str): Required. The job name. 
For example: ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will @@ -819,9 +796,7 @@ def run_job( >>> response = client.run_job(name) Args: - name (str): Required. - - The job name. For example: + name (str): Required. The job name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will diff --git a/scheduler/google/cloud/scheduler_v1/gapic/cloud_scheduler_client_config.py b/scheduler/google/cloud/scheduler_v1/gapic/cloud_scheduler_client_config.py index 31ccca1db159..da963b9d59b0 100644 --- a/scheduler/google/cloud/scheduler_v1/gapic/cloud_scheduler_client_config.py +++ b/scheduler/google/cloud/scheduler_v1/gapic/cloud_scheduler_client_config.py @@ -18,42 +18,42 @@ }, "methods": { "ListJobs": { - "timeout_millis": 30000, + "timeout_millis": 60000, "retry_codes_name": "idempotent", "retry_params_name": "default", }, "GetJob": { - "timeout_millis": 30000, + "timeout_millis": 60000, "retry_codes_name": "idempotent", "retry_params_name": "default", }, "CreateJob": { - "timeout_millis": 30000, + "timeout_millis": 60000, "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, "UpdateJob": { - "timeout_millis": 30000, + "timeout_millis": 60000, "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, "DeleteJob": { - "timeout_millis": 30000, - "retry_codes_name": "idempotent", + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, "PauseJob": { - "timeout_millis": 30000, + "timeout_millis": 60000, "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, "ResumeJob": { - "timeout_millis": 30000, + "timeout_millis": 60000, "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, "RunJob": { - 
"timeout_millis": 30000, + "timeout_millis": 60000, "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, diff --git a/scheduler/google/cloud/scheduler_v1/proto/cloudscheduler.proto b/scheduler/google/cloud/scheduler_v1/proto/cloudscheduler.proto index a68446235c38..89ce8cbd338c 100644 --- a/scheduler/google/cloud/scheduler_v1/proto/cloudscheduler.proto +++ b/scheduler/google/cloud/scheduler_v1/proto/cloudscheduler.proto @@ -18,6 +18,8 @@ syntax = "proto3"; package google.cloud.scheduler.v1; import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; import "google/api/resource.proto"; import "google/cloud/scheduler/v1/job.proto"; import "google/protobuf/empty.proto"; @@ -32,11 +34,15 @@ option objc_class_prefix = "SCHEDULER"; // The Cloud Scheduler API allows external entities to reliably // schedule asynchronous jobs. service CloudScheduler { + option (google.api.default_host) = "cloudscheduler.googleapis.com"; + option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; + // Lists jobs. rpc ListJobs(ListJobsRequest) returns (ListJobsResponse) { option (google.api.http) = { get: "/v1/{parent=projects/*/locations/*}/jobs" }; + option (google.api.method_signature) = "parent"; } // Gets a job. @@ -44,6 +50,7 @@ service CloudScheduler { option (google.api.http) = { get: "/v1/{name=projects/*/locations/*/jobs/*}" }; + option (google.api.method_signature) = "name"; } // Creates a job. @@ -52,6 +59,7 @@ service CloudScheduler { post: "/v1/{parent=projects/*/locations/*}/jobs" body: "job" }; + option (google.api.method_signature) = "parent,job"; } // Updates a job. @@ -68,6 +76,7 @@ service CloudScheduler { patch: "/v1/{job.name=projects/*/locations/*/jobs/*}" body: "job" }; + option (google.api.method_signature) = "job,update_mask"; } // Deletes a job. 
@@ -75,6 +84,7 @@ service CloudScheduler { option (google.api.http) = { delete: "/v1/{name=projects/*/locations/*/jobs/*}" }; + option (google.api.method_signature) = "name"; } // Pauses a job. @@ -89,6 +99,7 @@ service CloudScheduler { post: "/v1/{name=projects/*/locations/*/jobs/*}:pause" body: "*" }; + option (google.api.method_signature) = "name"; } // Resume a job. @@ -102,6 +113,7 @@ service CloudScheduler { post: "/v1/{name=projects/*/locations/*/jobs/*}:resume" body: "*" }; + option (google.api.method_signature) = "name"; } // Forces a job to run now. @@ -113,16 +125,20 @@ service CloudScheduler { post: "/v1/{name=projects/*/locations/*/jobs/*}:run" body: "*" }; + option (google.api.method_signature) = "name"; } } // Request message for listing jobs using [ListJobs][google.cloud.scheduler.v1.CloudScheduler.ListJobs]. message ListJobsRequest { - // Required. - // - // The location name. For example: + // Required. The location name. For example: // `projects/PROJECT_ID/locations/LOCATION_ID`. - string parent = 1; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "cloudscheduler.googleapis.com/Job" + } + ]; // Requested page size. // @@ -159,79 +175,94 @@ message ListJobsResponse { // Request message for [GetJob][google.cloud.scheduler.v1.CloudScheduler.GetJob]. message GetJobRequest { - // Required. - // - // The job name. For example: + // Required. The job name. For example: // `projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID`. - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "cloudscheduler.googleapis.com/Job" + } + ]; } // Request message for [CreateJob][google.cloud.scheduler.v1.CloudScheduler.CreateJob]. message CreateJobRequest { - // Required. - // - // The location name. For example: + // Required. The location name. For example: // `projects/PROJECT_ID/locations/LOCATION_ID`. 
- string parent = 1; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "cloudscheduler.googleapis.com/Job" + } + ]; - // Required. - // - // The job to add. The user can optionally specify a name for the + // Required. The job to add. The user can optionally specify a name for the // job in [name][google.cloud.scheduler.v1.Job.name]. [name][google.cloud.scheduler.v1.Job.name] cannot be the same as an // existing job. If a name is not specified then the system will // generate a random unique name that will be returned // ([name][google.cloud.scheduler.v1.Job.name]) in the response. - Job job = 2; + Job job = 2 [(google.api.field_behavior) = REQUIRED]; } // Request message for [UpdateJob][google.cloud.scheduler.v1.CloudScheduler.UpdateJob]. message UpdateJobRequest { - // Required. - // - // The new job properties. [name][google.cloud.scheduler.v1.Job.name] must be specified. + // Required. The new job properties. [name][google.cloud.scheduler.v1.Job.name] must be specified. // // Output only fields cannot be modified using UpdateJob. // Any value specified for an output only field will be ignored. - Job job = 1; + Job job = 1 [(google.api.field_behavior) = REQUIRED]; // A mask used to specify which fields of the job are being updated. - google.protobuf.FieldMask update_mask = 2; + google.protobuf.FieldMask update_mask = 2 + [(google.api.field_behavior) = REQUIRED]; } // Request message for deleting a job using // [DeleteJob][google.cloud.scheduler.v1.CloudScheduler.DeleteJob]. message DeleteJobRequest { - // Required. - // - // The job name. For example: + // Required. The job name. For example: // `projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID`. 
- string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "cloudscheduler.googleapis.com/Job" + } + ]; } // Request message for [PauseJob][google.cloud.scheduler.v1.CloudScheduler.PauseJob]. message PauseJobRequest { - // Required. - // - // The job name. For example: + // Required. The job name. For example: // `projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID`. - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "cloudscheduler.googleapis.com/Job" + } + ]; } // Request message for [ResumeJob][google.cloud.scheduler.v1.CloudScheduler.ResumeJob]. message ResumeJobRequest { - // Required. - // - // The job name. For example: + // Required. The job name. For example: // `projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID`. - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "cloudscheduler.googleapis.com/Job" + } + ]; } // Request message for forcing a job to run now using // [RunJob][google.cloud.scheduler.v1.CloudScheduler.RunJob]. message RunJobRequest { - // Required. - // - // The job name. For example: + // Required. The job name. For example: // `projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID`. 
- string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "cloudscheduler.googleapis.com/Job" + } + ]; } diff --git a/scheduler/google/cloud/scheduler_v1/proto/cloudscheduler_pb2.py b/scheduler/google/cloud/scheduler_v1/proto/cloudscheduler_pb2.py index 109cb95585aa..4b7e2078b9bd 100644 --- a/scheduler/google/cloud/scheduler_v1/proto/cloudscheduler_pb2.py +++ b/scheduler/google/cloud/scheduler_v1/proto/cloudscheduler_pb2.py @@ -16,6 +16,8 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.cloud.scheduler_v1.proto import ( job_pb2 as google_dot_cloud_dot_scheduler__v1_dot_proto_dot_job__pb2, @@ -32,10 +34,12 @@ "\n\035com.google.cloud.scheduler.v1B\016SchedulerProtoP\001ZBgoogle.golang.org/genproto/googleapis/cloud/scheduler/v1;scheduler\242\002\tSCHEDULER" ), serialized_pb=_b( - '\n4google/cloud/scheduler_v1/proto/cloudscheduler.proto\x12\x19google.cloud.scheduler.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x19google/api/resource.proto\x1a)google/cloud/scheduler_v1/proto/job.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto"H\n\x0fListJobsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x05 \x01(\x05\x12\x12\n\npage_token\x18\x06 \x01(\t"Y\n\x10ListJobsResponse\x12,\n\x04jobs\x18\x01 \x03(\x0b\x32\x1e.google.cloud.scheduler.v1.Job\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\x1d\n\rGetJobRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"O\n\x10\x43reateJobRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12+\n\x03job\x18\x02 \x01(\x0b\x32\x1e.google.cloud.scheduler.v1.Job"p\n\x10UpdateJobRequest\x12+\n\x03job\x18\x01 \x01(\x0b\x32\x1e.google.cloud.scheduler.v1.Job\x12/\n\x0bupdate_mask\x18\x02 
\x01(\x0b\x32\x1a.google.protobuf.FieldMask" \n\x10\x44\x65leteJobRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"\x1f\n\x0fPauseJobRequest\x12\x0c\n\x04name\x18\x01 \x01(\t" \n\x10ResumeJobRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"\x1d\n\rRunJobRequest\x12\x0c\n\x04name\x18\x01 \x01(\t2\x95\t\n\x0e\x43loudScheduler\x12\x95\x01\n\x08ListJobs\x12*.google.cloud.scheduler.v1.ListJobsRequest\x1a+.google.cloud.scheduler.v1.ListJobsResponse"0\x82\xd3\xe4\x93\x02*\x12(/v1/{parent=projects/*/locations/*}/jobs\x12\x84\x01\n\x06GetJob\x12(.google.cloud.scheduler.v1.GetJobRequest\x1a\x1e.google.cloud.scheduler.v1.Job"0\x82\xd3\xe4\x93\x02*\x12(/v1/{name=projects/*/locations/*/jobs/*}\x12\x8f\x01\n\tCreateJob\x12+.google.cloud.scheduler.v1.CreateJobRequest\x1a\x1e.google.cloud.scheduler.v1.Job"5\x82\xd3\xe4\x93\x02/"(/v1/{parent=projects/*/locations/*}/jobs:\x03job\x12\x93\x01\n\tUpdateJob\x12+.google.cloud.scheduler.v1.UpdateJobRequest\x1a\x1e.google.cloud.scheduler.v1.Job"9\x82\xd3\xe4\x93\x02\x33\x32,/v1/{job.name=projects/*/locations/*/jobs/*}:\x03job\x12\x82\x01\n\tDeleteJob\x12+.google.cloud.scheduler.v1.DeleteJobRequest\x1a\x16.google.protobuf.Empty"0\x82\xd3\xe4\x93\x02**(/v1/{name=projects/*/locations/*/jobs/*}\x12\x91\x01\n\x08PauseJob\x12*.google.cloud.scheduler.v1.PauseJobRequest\x1a\x1e.google.cloud.scheduler.v1.Job"9\x82\xd3\xe4\x93\x02\x33"./v1/{name=projects/*/locations/*/jobs/*}:pause:\x01*\x12\x94\x01\n\tResumeJob\x12+.google.cloud.scheduler.v1.ResumeJobRequest\x1a\x1e.google.cloud.scheduler.v1.Job":\x82\xd3\xe4\x93\x02\x34"//v1/{name=projects/*/locations/*/jobs/*}:resume:\x01*\x12\x8b\x01\n\x06RunJob\x12(.google.cloud.scheduler.v1.RunJobRequest\x1a\x1e.google.cloud.scheduler.v1.Job"7\x82\xd3\xe4\x93\x02\x31",/v1/{name=projects/*/locations/*/jobs/*}:run:\x01*B\x81\x01\n\x1d\x63om.google.cloud.scheduler.v1B\x0eSchedulerProtoP\x01ZBgoogle.golang.org/genproto/googleapis/cloud/scheduler/v1;scheduler\xa2\x02\tSCHEDULERb\x06proto3' + 
'\n4google/cloud/scheduler_v1/proto/cloudscheduler.proto\x12\x19google.cloud.scheduler.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a)google/cloud/scheduler_v1/proto/job.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto"s\n\x0fListJobsRequest\x12\x39\n\x06parent\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\x12!cloudscheduler.googleapis.com/Job\x12\x11\n\tpage_size\x18\x05 \x01(\x05\x12\x12\n\npage_token\x18\x06 \x01(\t"Y\n\x10ListJobsResponse\x12,\n\x04jobs\x18\x01 \x03(\x0b\x32\x1e.google.cloud.scheduler.v1.Job\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"H\n\rGetJobRequest\x12\x37\n\x04name\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!cloudscheduler.googleapis.com/Job"\x7f\n\x10\x43reateJobRequest\x12\x39\n\x06parent\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\x12!cloudscheduler.googleapis.com/Job\x12\x30\n\x03job\x18\x02 \x01(\x0b\x32\x1e.google.cloud.scheduler.v1.JobB\x03\xe0\x41\x02"z\n\x10UpdateJobRequest\x12\x30\n\x03job\x18\x01 \x01(\x0b\x32\x1e.google.cloud.scheduler.v1.JobB\x03\xe0\x41\x02\x12\x34\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02"K\n\x10\x44\x65leteJobRequest\x12\x37\n\x04name\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!cloudscheduler.googleapis.com/Job"J\n\x0fPauseJobRequest\x12\x37\n\x04name\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!cloudscheduler.googleapis.com/Job"K\n\x10ResumeJobRequest\x12\x37\n\x04name\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!cloudscheduler.googleapis.com/Job"H\n\rRunJobRequest\x12\x37\n\x04name\x18\x01 
\x01(\tB)\xe0\x41\x02\xfa\x41#\n!cloudscheduler.googleapis.com/Job2\xb3\n\n\x0e\x43loudScheduler\x12\x9e\x01\n\x08ListJobs\x12*.google.cloud.scheduler.v1.ListJobsRequest\x1a+.google.cloud.scheduler.v1.ListJobsResponse"9\x82\xd3\xe4\x93\x02*\x12(/v1/{parent=projects/*/locations/*}/jobs\xda\x41\x06parent\x12\x8b\x01\n\x06GetJob\x12(.google.cloud.scheduler.v1.GetJobRequest\x1a\x1e.google.cloud.scheduler.v1.Job"7\x82\xd3\xe4\x93\x02*\x12(/v1/{name=projects/*/locations/*/jobs/*}\xda\x41\x04name\x12\x9c\x01\n\tCreateJob\x12+.google.cloud.scheduler.v1.CreateJobRequest\x1a\x1e.google.cloud.scheduler.v1.Job"B\x82\xd3\xe4\x93\x02/"(/v1/{parent=projects/*/locations/*}/jobs:\x03job\xda\x41\nparent,job\x12\xa5\x01\n\tUpdateJob\x12+.google.cloud.scheduler.v1.UpdateJobRequest\x1a\x1e.google.cloud.scheduler.v1.Job"K\x82\xd3\xe4\x93\x02\x33\x32,/v1/{job.name=projects/*/locations/*/jobs/*}:\x03job\xda\x41\x0fjob,update_mask\x12\x89\x01\n\tDeleteJob\x12+.google.cloud.scheduler.v1.DeleteJobRequest\x1a\x16.google.protobuf.Empty"7\x82\xd3\xe4\x93\x02**(/v1/{name=projects/*/locations/*/jobs/*}\xda\x41\x04name\x12\x98\x01\n\x08PauseJob\x12*.google.cloud.scheduler.v1.PauseJobRequest\x1a\x1e.google.cloud.scheduler.v1.Job"@\x82\xd3\xe4\x93\x02\x33"./v1/{name=projects/*/locations/*/jobs/*}:pause:\x01*\xda\x41\x04name\x12\x9b\x01\n\tResumeJob\x12+.google.cloud.scheduler.v1.ResumeJobRequest\x1a\x1e.google.cloud.scheduler.v1.Job"A\x82\xd3\xe4\x93\x02\x34"//v1/{name=projects/*/locations/*/jobs/*}:resume:\x01*\xda\x41\x04name\x12\x92\x01\n\x06RunJob\x12(.google.cloud.scheduler.v1.RunJobRequest\x1a\x1e.google.cloud.scheduler.v1.Job">\x82\xd3\xe4\x93\x02\x31",/v1/{name=projects/*/locations/*/jobs/*}:run:\x01*\xda\x41\x04name\x1aQ\xca\x41\x1d\x63loudscheduler.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB\x81\x01\n\x1d\x63om.google.cloud.scheduler.v1B\x0eSchedulerProtoP\x01ZBgoogle.golang.org/genproto/googleapis/cloud/scheduler/v1;scheduler\xa2\x02\tSCHEDULERb\x06proto3' ), 
dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, google_dot_api_dot_resource__pb2.DESCRIPTOR, google_dot_cloud_dot_scheduler__v1_dot_proto_dot_job__pb2.DESCRIPTOR, google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, @@ -66,7 +70,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A#\022!cloudscheduler.googleapis.com/Job" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -114,8 +120,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=246, - serialized_end=318, + serialized_start=304, + serialized_end=419, ) @@ -171,8 +177,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=320, - serialized_end=409, + serialized_start=421, + serialized_end=510, ) @@ -198,7 +204,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A#\n!cloudscheduler.googleapis.com/Job" + ), file=DESCRIPTOR, ) ], @@ -210,8 +218,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=411, - serialized_end=440, + serialized_start=512, + serialized_end=584, ) @@ -237,7 +245,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A#\022!cloudscheduler.googleapis.com/Job" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -255,7 +265,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -267,8 +277,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=442, - serialized_end=521, + serialized_start=586, + serialized_end=713, ) @@ -294,7 +304,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + 
serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -312,7 +322,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -324,8 +334,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=523, - serialized_end=635, + serialized_start=715, + serialized_end=837, ) @@ -351,7 +361,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A#\n!cloudscheduler.googleapis.com/Job" + ), file=DESCRIPTOR, ) ], @@ -363,8 +375,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=637, - serialized_end=669, + serialized_start=839, + serialized_end=914, ) @@ -390,7 +402,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A#\n!cloudscheduler.googleapis.com/Job" + ), file=DESCRIPTOR, ) ], @@ -402,8 +416,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=671, - serialized_end=702, + serialized_start=916, + serialized_end=990, ) @@ -429,7 +443,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A#\n!cloudscheduler.googleapis.com/Job" + ), file=DESCRIPTOR, ) ], @@ -441,8 +457,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=704, - serialized_end=736, + serialized_start=992, + serialized_end=1067, ) @@ -468,7 +484,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A#\n!cloudscheduler.googleapis.com/Job" + ), file=DESCRIPTOR, ) ], @@ -480,8 +498,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=738, - serialized_end=767, + serialized_start=1069, + serialized_end=1141, ) _LISTJOBSRESPONSE.fields_by_name[ @@ -519,7 
+537,7 @@ Attributes: parent: - Required. The location name. For example: + Required. The location name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID``. page_size: Requested page size. The maximum page size is 500. If @@ -583,7 +601,7 @@ Attributes: name: - Required. The job name. For example: + Required. The job name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. """, # @@protoc_insertion_point(class_scope:google.cloud.scheduler.v1.GetJobRequest) @@ -603,10 +621,10 @@ Attributes: parent: - Required. The location name. For example: + Required. The location name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID``. job: - Required. The job to add. The user can optionally specify a + Required. The job to add. The user can optionally specify a name for the job in [name][google.cloud.scheduler.v1.Job.name]. [name][google.cloud.scheduler.v1.Job.name] cannot be the same @@ -631,7 +649,7 @@ Attributes: job: - Required. The new job properties. + Required. The new job properties. [name][google.cloud.scheduler.v1.Job.name] must be specified. Output only fields cannot be modified using UpdateJob. Any value specified for an output only field will be ignored. @@ -656,7 +674,7 @@ Attributes: name: - Required. The job name. For example: + Required. The job name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. """, # @@protoc_insertion_point(class_scope:google.cloud.scheduler.v1.DeleteJobRequest) @@ -676,7 +694,7 @@ Attributes: name: - Required. The job name. For example: + Required. The job name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. """, # @@protoc_insertion_point(class_scope:google.cloud.scheduler.v1.PauseJobRequest) @@ -696,7 +714,7 @@ Attributes: name: - Required. The job name. For example: + Required. The job name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. 
""", # @@protoc_insertion_point(class_scope:google.cloud.scheduler.v1.ResumeJobRequest) @@ -716,7 +734,7 @@ Attributes: name: - Required. The job name. For example: + Required. The job name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. """, # @@protoc_insertion_point(class_scope:google.cloud.scheduler.v1.RunJobRequest) @@ -726,15 +744,27 @@ DESCRIPTOR._options = None +_LISTJOBSREQUEST.fields_by_name["parent"]._options = None +_GETJOBREQUEST.fields_by_name["name"]._options = None +_CREATEJOBREQUEST.fields_by_name["parent"]._options = None +_CREATEJOBREQUEST.fields_by_name["job"]._options = None +_UPDATEJOBREQUEST.fields_by_name["job"]._options = None +_UPDATEJOBREQUEST.fields_by_name["update_mask"]._options = None +_DELETEJOBREQUEST.fields_by_name["name"]._options = None +_PAUSEJOBREQUEST.fields_by_name["name"]._options = None +_RESUMEJOBREQUEST.fields_by_name["name"]._options = None +_RUNJOBREQUEST.fields_by_name["name"]._options = None _CLOUDSCHEDULER = _descriptor.ServiceDescriptor( name="CloudScheduler", full_name="google.cloud.scheduler.v1.CloudScheduler", file=DESCRIPTOR, index=0, - serialized_options=None, - serialized_start=770, - serialized_end=1943, + serialized_options=_b( + "\312A\035cloudscheduler.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" + ), + serialized_start=1144, + serialized_end=2475, methods=[ _descriptor.MethodDescriptor( name="ListJobs", @@ -744,7 +774,7 @@ input_type=_LISTJOBSREQUEST, output_type=_LISTJOBSRESPONSE, serialized_options=_b( - "\202\323\344\223\002*\022(/v1/{parent=projects/*/locations/*}/jobs" + "\202\323\344\223\002*\022(/v1/{parent=projects/*/locations/*}/jobs\332A\006parent" ), ), _descriptor.MethodDescriptor( @@ -755,7 +785,7 @@ input_type=_GETJOBREQUEST, output_type=google_dot_cloud_dot_scheduler__v1_dot_proto_dot_job__pb2._JOB, serialized_options=_b( - "\202\323\344\223\002*\022(/v1/{name=projects/*/locations/*/jobs/*}" + 
"\202\323\344\223\002*\022(/v1/{name=projects/*/locations/*/jobs/*}\332A\004name" ), ), _descriptor.MethodDescriptor( @@ -766,7 +796,7 @@ input_type=_CREATEJOBREQUEST, output_type=google_dot_cloud_dot_scheduler__v1_dot_proto_dot_job__pb2._JOB, serialized_options=_b( - '\202\323\344\223\002/"(/v1/{parent=projects/*/locations/*}/jobs:\003job' + '\202\323\344\223\002/"(/v1/{parent=projects/*/locations/*}/jobs:\003job\332A\nparent,job' ), ), _descriptor.MethodDescriptor( @@ -777,7 +807,7 @@ input_type=_UPDATEJOBREQUEST, output_type=google_dot_cloud_dot_scheduler__v1_dot_proto_dot_job__pb2._JOB, serialized_options=_b( - "\202\323\344\223\00232,/v1/{job.name=projects/*/locations/*/jobs/*}:\003job" + "\202\323\344\223\00232,/v1/{job.name=projects/*/locations/*/jobs/*}:\003job\332A\017job,update_mask" ), ), _descriptor.MethodDescriptor( @@ -788,7 +818,7 @@ input_type=_DELETEJOBREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, serialized_options=_b( - "\202\323\344\223\002**(/v1/{name=projects/*/locations/*/jobs/*}" + "\202\323\344\223\002**(/v1/{name=projects/*/locations/*/jobs/*}\332A\004name" ), ), _descriptor.MethodDescriptor( @@ -799,7 +829,7 @@ input_type=_PAUSEJOBREQUEST, output_type=google_dot_cloud_dot_scheduler__v1_dot_proto_dot_job__pb2._JOB, serialized_options=_b( - '\202\323\344\223\0023"./v1/{name=projects/*/locations/*/jobs/*}:pause:\001*' + '\202\323\344\223\0023"./v1/{name=projects/*/locations/*/jobs/*}:pause:\001*\332A\004name' ), ), _descriptor.MethodDescriptor( @@ -810,7 +840,7 @@ input_type=_RESUMEJOBREQUEST, output_type=google_dot_cloud_dot_scheduler__v1_dot_proto_dot_job__pb2._JOB, serialized_options=_b( - '\202\323\344\223\0024"//v1/{name=projects/*/locations/*/jobs/*}:resume:\001*' + '\202\323\344\223\0024"//v1/{name=projects/*/locations/*/jobs/*}:resume:\001*\332A\004name' ), ), _descriptor.MethodDescriptor( @@ -821,7 +851,7 @@ input_type=_RUNJOBREQUEST, output_type=google_dot_cloud_dot_scheduler__v1_dot_proto_dot_job__pb2._JOB, 
serialized_options=_b( - '\202\323\344\223\0021",/v1/{name=projects/*/locations/*/jobs/*}:run:\001*' + '\202\323\344\223\0021",/v1/{name=projects/*/locations/*/jobs/*}:run:\001*\332A\004name' ), ), ], diff --git a/scheduler/google/cloud/scheduler_v1/proto/job.proto b/scheduler/google/cloud/scheduler_v1/proto/job.proto index 60b47263151b..d26070266b18 100644 --- a/scheduler/google/cloud/scheduler_v1/proto/job.proto +++ b/scheduler/google/cloud/scheduler_v1/proto/job.proto @@ -17,12 +17,12 @@ syntax = "proto3"; package google.cloud.scheduler.v1; -import "google/api/annotations.proto"; import "google/api/resource.proto"; import "google/cloud/scheduler/v1/target.proto"; import "google/protobuf/duration.proto"; import "google/protobuf/timestamp.proto"; import "google/rpc/status.proto"; +import "google/api/annotations.proto"; option go_package = "google.golang.org/genproto/googleapis/cloud/scheduler/v1;scheduler"; option java_multiple_files = true; @@ -32,6 +32,11 @@ option java_package = "com.google.cloud.scheduler.v1"; // Configuration for a job. // The maximum allowed size for a job is 100KB. message Job { + option (google.api.resource) = { + type: "cloudscheduler.googleapis.com/Job" + pattern: "projects/{project}/locations/{location}/jobs/{job}" + }; + // State of the job. enum State { // Unspecified state. 
diff --git a/scheduler/google/cloud/scheduler_v1/proto/job_pb2.py b/scheduler/google/cloud/scheduler_v1/proto/job_pb2.py index 7f5d3a791458..cfc36eecfcca 100644 --- a/scheduler/google/cloud/scheduler_v1/proto/job_pb2.py +++ b/scheduler/google/cloud/scheduler_v1/proto/job_pb2.py @@ -15,7 +15,6 @@ _sym_db = _symbol_database.Default() -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.cloud.scheduler_v1.proto import ( target_pb2 as google_dot_cloud_dot_scheduler__v1_dot_proto_dot_target__pb2, @@ -23,6 +22,7 @@ from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2 +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 DESCRIPTOR = _descriptor.FileDescriptor( @@ -33,15 +33,15 @@ "\n\035com.google.cloud.scheduler.v1B\010JobProtoP\001ZBgoogle.golang.org/genproto/googleapis/cloud/scheduler/v1;scheduler" ), serialized_pb=_b( - '\n)google/cloud/scheduler_v1/proto/job.proto\x12\x19google.cloud.scheduler.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x19google/api/resource.proto\x1a,google/cloud/scheduler_v1/proto/target.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto"\xef\x05\n\x03Job\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12@\n\rpubsub_target\x18\x04 \x01(\x0b\x32\'.google.cloud.scheduler.v1.PubsubTargetH\x00\x12P\n\x16\x61pp_engine_http_target\x18\x05 \x01(\x0b\x32..google.cloud.scheduler.v1.AppEngineHttpTargetH\x00\x12<\n\x0bhttp_target\x18\x06 \x01(\x0b\x32%.google.cloud.scheduler.v1.HttpTargetH\x00\x12\x10\n\x08schedule\x18\x14 \x01(\t\x12\x11\n\ttime_zone\x18\x15 \x01(\t\x12\x34\n\x10user_update_time\x18\t 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x33\n\x05state\x18\n \x01(\x0e\x32$.google.cloud.scheduler.v1.Job.State\x12"\n\x06status\x18\x0b \x01(\x0b\x32\x12.google.rpc.Status\x12\x31\n\rschedule_time\x18\x11 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x35\n\x11last_attempt_time\x18\x12 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12<\n\x0cretry_config\x18\x13 \x01(\x0b\x32&.google.cloud.scheduler.v1.RetryConfig\x12\x33\n\x10\x61ttempt_deadline\x18\x16 \x01(\x0b\x32\x19.google.protobuf.Duration"X\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07\x45NABLED\x10\x01\x12\n\n\x06PAUSED\x10\x02\x12\x0c\n\x08\x44ISABLED\x10\x03\x12\x11\n\rUPDATE_FAILED\x10\x04\x42\x08\n\x06target"\xe2\x01\n\x0bRetryConfig\x12\x13\n\x0bretry_count\x18\x01 \x01(\x05\x12\x35\n\x12max_retry_duration\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x37\n\x14min_backoff_duration\x18\x03 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x37\n\x14max_backoff_duration\x18\x04 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x15\n\rmax_doublings\x18\x05 \x01(\x05\x42o\n\x1d\x63om.google.cloud.scheduler.v1B\x08JobProtoP\x01ZBgoogle.golang.org/genproto/googleapis/cloud/scheduler/v1;schedulerb\x06proto3' + '\n)google/cloud/scheduler_v1/proto/job.proto\x12\x19google.cloud.scheduler.v1\x1a\x19google/api/resource.proto\x1a,google/cloud/scheduler_v1/proto/target.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto\x1a\x1cgoogle/api/annotations.proto"\xcb\x06\n\x03Job\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12@\n\rpubsub_target\x18\x04 \x01(\x0b\x32\'.google.cloud.scheduler.v1.PubsubTargetH\x00\x12P\n\x16\x61pp_engine_http_target\x18\x05 \x01(\x0b\x32..google.cloud.scheduler.v1.AppEngineHttpTargetH\x00\x12<\n\x0bhttp_target\x18\x06 \x01(\x0b\x32%.google.cloud.scheduler.v1.HttpTargetH\x00\x12\x10\n\x08schedule\x18\x14 \x01(\t\x12\x11\n\ttime_zone\x18\x15 
\x01(\t\x12\x34\n\x10user_update_time\x18\t \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x33\n\x05state\x18\n \x01(\x0e\x32$.google.cloud.scheduler.v1.Job.State\x12"\n\x06status\x18\x0b \x01(\x0b\x32\x12.google.rpc.Status\x12\x31\n\rschedule_time\x18\x11 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x35\n\x11last_attempt_time\x18\x12 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12<\n\x0cretry_config\x18\x13 \x01(\x0b\x32&.google.cloud.scheduler.v1.RetryConfig\x12\x33\n\x10\x61ttempt_deadline\x18\x16 \x01(\x0b\x32\x19.google.protobuf.Duration"X\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07\x45NABLED\x10\x01\x12\n\n\x06PAUSED\x10\x02\x12\x0c\n\x08\x44ISABLED\x10\x03\x12\x11\n\rUPDATE_FAILED\x10\x04:Z\xea\x41W\n!cloudscheduler.googleapis.com/Job\x12\x32projects/{project}/locations/{location}/jobs/{job}B\x08\n\x06target"\xe2\x01\n\x0bRetryConfig\x12\x13\n\x0bretry_count\x18\x01 \x01(\x05\x12\x35\n\x12max_retry_duration\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x37\n\x14min_backoff_duration\x18\x03 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x37\n\x14max_backoff_duration\x18\x04 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x15\n\rmax_doublings\x18\x05 \x01(\x05\x42o\n\x1d\x63om.google.cloud.scheduler.v1B\x08JobProtoP\x01ZBgoogle.golang.org/genproto/googleapis/cloud/scheduler/v1;schedulerb\x06proto3' ), dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, google_dot_api_dot_resource__pb2.DESCRIPTOR, google_dot_cloud_dot_scheduler__v1_dot_proto_dot_target__pb2.DESCRIPTOR, google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, google_dot_rpc_dot_status__pb2.DESCRIPTOR, + google_dot_api_dot_annotations__pb2.DESCRIPTOR, ], ) @@ -343,7 +343,9 @@ extensions=[], nested_types=[], enum_types=[_JOB_STATE], - serialized_options=None, + serialized_options=_b( + "\352AW\n!cloudscheduler.googleapis.com/Job\0222projects/{project}/locations/{location}/jobs/{job}" + ), is_extendable=False, 
syntax="proto3", extension_ranges=[], @@ -357,7 +359,7 @@ ) ], serialized_start=266, - serialized_end=1017, + serialized_end=1109, ) @@ -467,8 +469,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1020, - serialized_end=1246, + serialized_start=1112, + serialized_end=1338, ) _JOB.fields_by_name[ @@ -700,4 +702,5 @@ DESCRIPTOR._options = None +_JOB._options = None # @@protoc_insertion_point(module_scope) diff --git a/scheduler/google/cloud/scheduler_v1/proto/target.proto b/scheduler/google/cloud/scheduler_v1/proto/target.proto index e33b1558e53d..9a8f32f7c60e 100644 --- a/scheduler/google/cloud/scheduler_v1/proto/target.proto +++ b/scheduler/google/cloud/scheduler_v1/proto/target.proto @@ -17,8 +17,8 @@ syntax = "proto3"; package google.cloud.scheduler.v1; +import "google/api/resource.proto"; import "google/api/annotations.proto"; -import "google/protobuf/any.proto"; option go_package = "google.golang.org/genproto/googleapis/cloud/scheduler/v1;scheduler"; option java_multiple_files = true; @@ -32,9 +32,7 @@ option java_package = "com.google.cloud.scheduler.v1"; // constitutes a failed execution. For a redirected request, the response // returned by the redirected request is considered. message HttpTarget { - // Required. - // - // The full URI path that the request will be sent to. This string + // Required. The full URI path that the request will be sent to. This string // must begin with either "http://" or "https://". Some examples of // valid values for [uri][google.cloud.scheduler.v1.HttpTarget.uri] are: // `http://acme.com` and `https://acme.com/sales:8080`. Cloud Scheduler will @@ -77,8 +75,8 @@ message HttpTarget { // will be generated and attached as an `Authorization` header in the HTTP // request. // - // This type of authorization should be used when sending requests to a GCP - // endpoint. + // This type of authorization should generally only be used when calling + // Google APIs hosted on *.googleapis.com. 
OAuthToken oauth_token = 5; // If specified, an @@ -86,8 +84,9 @@ message HttpTarget { // token will be generated and attached as an `Authorization` header in the // HTTP request. // - // This type of authorization should be used when sending requests to third - // party endpoints or Cloud Run. + // This type of authorization can be used for many scenarios, including + // calling Cloud Run, or endpoints where you intend to validate the token + // yourself. OidcToken oidc_token = 6; } } @@ -162,16 +161,16 @@ message AppEngineHttpTarget { // Pub/Sub target. The job will be delivered by publishing a message to // the given Pub/Sub topic. message PubsubTarget { - // Required. - // - // The name of the Cloud Pub/Sub topic to which messages will + // Required. The name of the Cloud Pub/Sub topic to which messages will // be published when a job is delivered. The topic name must be in the // same format as required by PubSub's // [PublishRequest.name](https://cloud.google.com/pubsub/docs/reference/rpc/google.pubsub.v1#publishrequest), // for example `projects/PROJECT_ID/topics/TOPIC_ID`. // // The topic must be in the same project as the Cloud Scheduler job. - string topic_name = 1; + string topic_name = 1 [(google.api.resource_reference) = { + type: "pubsub.googleapis.com/Topic" + }]; // The message payload for PubsubMessage. // @@ -315,8 +314,8 @@ enum HttpMethod { // Contains information needed for generating an // [OAuth token](https://developers.google.com/identity/protocols/OAuth2). -// This type of authorization should be used when sending requests to a GCP -// endpoint. +// This type of authorization should generally only be used when calling Google +// APIs hosted on *.googleapis.com. message OAuthToken { // [Service account email](https://cloud.google.com/iam/docs/service-accounts) // to be used for generating OAuth token. 
@@ -332,9 +331,10 @@ message OAuthToken { // Contains information needed for generating an // [OpenID Connect -// token](https://developers.google.com/identity/protocols/OpenIDConnect). This -// type of authorization should be used when sending requests to third party -// endpoints or Cloud Run. +// token](https://developers.google.com/identity/protocols/OpenIDConnect). +// This type of authorization can be used for many scenarios, including +// calling Cloud Run, or endpoints where you intend to validate the token +// yourself. message OidcToken { // [Service account email](https://cloud.google.com/iam/docs/service-accounts) // to be used for generating OIDC token. @@ -346,3 +346,11 @@ message OidcToken { // specified in target will be used. string audience = 2; } + +// The Pub/Sub Topic resource definition is in google/cloud/pubsub/v1/, +// but we do not import that proto directly; therefore, we redefine the +// pattern here. +option (google.api.resource_definition) = { + type: "pubsub.googleapis.com/Topic" + pattern: "projects/{project}/topics/{topic}" +}; diff --git a/scheduler/google/cloud/scheduler_v1/proto/target_pb2.py b/scheduler/google/cloud/scheduler_v1/proto/target_pb2.py index c6cb1357681c..6d25c3da2a4b 100644 --- a/scheduler/google/cloud/scheduler_v1/proto/target_pb2.py +++ b/scheduler/google/cloud/scheduler_v1/proto/target_pb2.py @@ -16,8 +16,8 @@ _sym_db = _symbol_database.Default() +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 DESCRIPTOR = _descriptor.FileDescriptor( @@ -25,14 +25,14 @@ package="google.cloud.scheduler.v1", syntax="proto3", serialized_options=_b( - "\n\035com.google.cloud.scheduler.v1B\013TargetProtoP\001ZBgoogle.golang.org/genproto/googleapis/cloud/scheduler/v1;scheduler" + 
"\n\035com.google.cloud.scheduler.v1B\013TargetProtoP\001ZBgoogle.golang.org/genproto/googleapis/cloud/scheduler/v1;scheduler\352A@\n\033pubsub.googleapis.com/Topic\022!projects/{project}/topics/{topic}" ), serialized_pb=_b( - '\n,google/cloud/scheduler_v1/proto/target.proto\x12\x19google.cloud.scheduler.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x19google/protobuf/any.proto"\xea\x02\n\nHttpTarget\x12\x0b\n\x03uri\x18\x01 \x01(\t\x12:\n\x0bhttp_method\x18\x02 \x01(\x0e\x32%.google.cloud.scheduler.v1.HttpMethod\x12\x43\n\x07headers\x18\x03 \x03(\x0b\x32\x32.google.cloud.scheduler.v1.HttpTarget.HeadersEntry\x12\x0c\n\x04\x62ody\x18\x04 \x01(\x0c\x12<\n\x0boauth_token\x18\x05 \x01(\x0b\x32%.google.cloud.scheduler.v1.OAuthTokenH\x00\x12:\n\noidc_token\x18\x06 \x01(\x0b\x32$.google.cloud.scheduler.v1.OidcTokenH\x00\x1a.\n\x0cHeadersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x16\n\x14\x61uthorization_header"\xbc\x02\n\x13\x41ppEngineHttpTarget\x12:\n\x0bhttp_method\x18\x01 \x01(\x0e\x32%.google.cloud.scheduler.v1.HttpMethod\x12G\n\x12\x61pp_engine_routing\x18\x02 \x01(\x0b\x32+.google.cloud.scheduler.v1.AppEngineRouting\x12\x14\n\x0crelative_uri\x18\x03 \x01(\t\x12L\n\x07headers\x18\x04 \x03(\x0b\x32;.google.cloud.scheduler.v1.AppEngineHttpTarget.HeadersEntry\x12\x0c\n\x04\x62ody\x18\x05 \x01(\x0c\x1a.\n\x0cHeadersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xb0\x01\n\x0cPubsubTarget\x12\x12\n\ntopic_name\x18\x01 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x03 \x01(\x0c\x12K\n\nattributes\x18\x04 \x03(\x0b\x32\x37.google.cloud.scheduler.v1.PubsubTarget.AttributesEntry\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"T\n\x10\x41ppEngineRouting\x12\x0f\n\x07service\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\x12\x10\n\x08instance\x18\x03 \x01(\t\x12\x0c\n\x04host\x18\x04 
\x01(\t":\n\nOAuthToken\x12\x1d\n\x15service_account_email\x18\x01 \x01(\t\x12\r\n\x05scope\x18\x02 \x01(\t"<\n\tOidcToken\x12\x1d\n\x15service_account_email\x18\x01 \x01(\t\x12\x10\n\x08\x61udience\x18\x02 \x01(\t*s\n\nHttpMethod\x12\x1b\n\x17HTTP_METHOD_UNSPECIFIED\x10\x00\x12\x08\n\x04POST\x10\x01\x12\x07\n\x03GET\x10\x02\x12\x08\n\x04HEAD\x10\x03\x12\x07\n\x03PUT\x10\x04\x12\n\n\x06\x44\x45LETE\x10\x05\x12\t\n\x05PATCH\x10\x06\x12\x0b\n\x07OPTIONS\x10\x07\x42r\n\x1d\x63om.google.cloud.scheduler.v1B\x0bTargetProtoP\x01ZBgoogle.golang.org/genproto/googleapis/cloud/scheduler/v1;schedulerb\x06proto3' + '\n,google/cloud/scheduler_v1/proto/target.proto\x12\x19google.cloud.scheduler.v1\x1a\x19google/api/resource.proto\x1a\x1cgoogle/api/annotations.proto"\xea\x02\n\nHttpTarget\x12\x0b\n\x03uri\x18\x01 \x01(\t\x12:\n\x0bhttp_method\x18\x02 \x01(\x0e\x32%.google.cloud.scheduler.v1.HttpMethod\x12\x43\n\x07headers\x18\x03 \x03(\x0b\x32\x32.google.cloud.scheduler.v1.HttpTarget.HeadersEntry\x12\x0c\n\x04\x62ody\x18\x04 \x01(\x0c\x12<\n\x0boauth_token\x18\x05 \x01(\x0b\x32%.google.cloud.scheduler.v1.OAuthTokenH\x00\x12:\n\noidc_token\x18\x06 \x01(\x0b\x32$.google.cloud.scheduler.v1.OidcTokenH\x00\x1a.\n\x0cHeadersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x16\n\x14\x61uthorization_header"\xbc\x02\n\x13\x41ppEngineHttpTarget\x12:\n\x0bhttp_method\x18\x01 \x01(\x0e\x32%.google.cloud.scheduler.v1.HttpMethod\x12G\n\x12\x61pp_engine_routing\x18\x02 \x01(\x0b\x32+.google.cloud.scheduler.v1.AppEngineRouting\x12\x14\n\x0crelative_uri\x18\x03 \x01(\t\x12L\n\x07headers\x18\x04 \x03(\x0b\x32;.google.cloud.scheduler.v1.AppEngineHttpTarget.HeadersEntry\x12\x0c\n\x04\x62ody\x18\x05 \x01(\x0c\x1a.\n\x0cHeadersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xd2\x01\n\x0cPubsubTarget\x12\x34\n\ntopic_name\x18\x01 \x01(\tB \xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic\x12\x0c\n\x04\x64\x61ta\x18\x03 
\x01(\x0c\x12K\n\nattributes\x18\x04 \x03(\x0b\x32\x37.google.cloud.scheduler.v1.PubsubTarget.AttributesEntry\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"T\n\x10\x41ppEngineRouting\x12\x0f\n\x07service\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\x12\x10\n\x08instance\x18\x03 \x01(\t\x12\x0c\n\x04host\x18\x04 \x01(\t":\n\nOAuthToken\x12\x1d\n\x15service_account_email\x18\x01 \x01(\t\x12\r\n\x05scope\x18\x02 \x01(\t"<\n\tOidcToken\x12\x1d\n\x15service_account_email\x18\x01 \x01(\t\x12\x10\n\x08\x61udience\x18\x02 \x01(\t*s\n\nHttpMethod\x12\x1b\n\x17HTTP_METHOD_UNSPECIFIED\x10\x00\x12\x08\n\x04POST\x10\x01\x12\x07\n\x03GET\x10\x02\x12\x08\n\x04HEAD\x10\x03\x12\x07\n\x03PUT\x10\x04\x12\n\n\x06\x44\x45LETE\x10\x05\x12\t\n\x05PATCH\x10\x06\x12\x0b\n\x07OPTIONS\x10\x07\x42\xb5\x01\n\x1d\x63om.google.cloud.scheduler.v1B\x0bTargetProtoP\x01ZBgoogle.golang.org/genproto/googleapis/cloud/scheduler/v1;scheduler\xea\x41@\n\x1bpubsub.googleapis.com/Topic\x12!projects/{project}/topics/{topic}b\x06proto3' ), dependencies=[ + google_dot_api_dot_resource__pb2.DESCRIPTOR, google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_protobuf_dot_any__pb2.DESCRIPTOR, ], ) @@ -73,8 +73,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=1203, - serialized_end=1318, + serialized_start=1237, + serialized_end=1352, ) _sym_db.RegisterEnumDescriptor(_HTTPMETHOD) @@ -501,8 +501,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=944, - serialized_end=993, + serialized_start=978, + serialized_end=1027, ) _PUBSUBTARGET = _descriptor.Descriptor( @@ -527,7 +527,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\372A\035\n\033pubsub.googleapis.com/Topic"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -576,7 +576,7 @@ extension_ranges=[], oneofs=[], serialized_start=817, - serialized_end=993, + 
serialized_end=1027, ) @@ -668,8 +668,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=995, - serialized_end=1079, + serialized_start=1029, + serialized_end=1113, ) @@ -725,8 +725,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1081, - serialized_end=1139, + serialized_start=1115, + serialized_end=1173, ) @@ -782,8 +782,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1141, - serialized_end=1201, + serialized_start=1175, + serialized_end=1235, ) _HTTPTARGET_HEADERSENTRY.containing_type = _HTTPTARGET @@ -848,7 +848,7 @@ Attributes: uri: - Required. The full URI path that the request will be sent to. + Required. The full URI path that the request will be sent to. This string must begin with either "http://" or "https://". Some examples of valid values for [uri][google.cloud.scheduler.v1.HttpTarget.uri] are: @@ -887,14 +887,16 @@ If specified, an `OAuth token `__ will be generated and attached as an ``Authorization`` header - in the HTTP request. This type of authorization should be - used when sending requests to a GCP endpoint. + in the HTTP request. This type of authorization should + generally only be used when calling Google APIs hosted on + \*.googleapis.com. oidc_token: If specified, an `OIDC `__ token will be generated and attached as an ``Authorization`` header in the HTTP request. - This type of authorization should be used when sending - requests to third party endpoints or Cloud Run. + This type of authorization can be used for many scenarios, + including calling Cloud Run, or endpoints where you intend to + validate the token yourself. """, # @@protoc_insertion_point(class_scope:google.cloud.scheduler.v1.HttpTarget) ), @@ -999,7 +1001,7 @@ Attributes: topic_name: - Required. The name of the Cloud Pub/Sub topic to which + Required. The name of the Cloud Pub/Sub topic to which messages will be published when a job is delivered. 
The topic name must be in the same format as required by PubSub's `PublishRequest.name `__. This - type of authorization should be used when sending requests to a GCP - endpoint. + type of authorization should generally only be used when calling Google + APIs hosted on \*.googleapis.com. Attributes: @@ -1153,8 +1155,9 @@ __module__="google.cloud.scheduler_v1.proto.target_pb2", __doc__="""Contains information needed for generating an `OpenID Connect token `__. - This type of authorization should be used when sending requests to third - party endpoints or Cloud Run. + This type of authorization can be used for many scenarios, including + calling Cloud Run, or endpoints where you intend to validate the token + yourself. Attributes: @@ -1178,4 +1181,5 @@ _HTTPTARGET_HEADERSENTRY._options = None _APPENGINEHTTPTARGET_HEADERSENTRY._options = None _PUBSUBTARGET_ATTRIBUTESENTRY._options = None +_PUBSUBTARGET.fields_by_name["topic_name"]._options = None # @@protoc_insertion_point(module_scope) diff --git a/scheduler/google/cloud/scheduler_v1beta1/gapic/cloud_scheduler_client.py b/scheduler/google/cloud/scheduler_v1beta1/gapic/cloud_scheduler_client.py index 50879081907b..446baffa4c41 100644 --- a/scheduler/google/cloud/scheduler_v1beta1/gapic/cloud_scheduler_client.py +++ b/scheduler/google/cloud/scheduler_v1beta1/gapic/cloud_scheduler_client.py @@ -100,13 +100,6 @@ def location_path(cls, project, location): location=location, ) - @classmethod - def project_path(cls, project): - """Return a fully-qualified project string.""" - return google.api_core.path_template.expand( - "projects/{project}", project=project - ) - def __init__( self, transport=None, @@ -253,9 +246,7 @@ def list_jobs( ... pass Args: - parent (str): Required. - - The location name. For example: + parent (str): Required. The location name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID``. page_size (int): The maximum number of resources contained in the underlying API response. 
If page streaming is performed per- @@ -344,9 +335,7 @@ def get_job( >>> response = client.get_job(name) Args: - name (str): Required. - - The job name. For example: + name (str): Required. The job name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will @@ -420,16 +409,12 @@ def create_job( >>> response = client.create_job(parent, job) Args: - parent (str): Required. - - The location name. For example: + parent (str): Required. The location name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID``. - job (Union[dict, ~google.cloud.scheduler_v1beta1.types.Job]): Required. - - The job to add. The user can optionally specify a name for the job in - ``name``. ``name`` cannot be the same as an existing job. If a name is - not specified then the system will generate a random unique name that - will be returned (``name``) in the response. + job (Union[dict, ~google.cloud.scheduler_v1beta1.types.Job]): Required. The job to add. The user can optionally specify a name for the + job in ``name``. ``name`` cannot be the same as an existing job. If a + name is not specified then the system will generate a random unique name + that will be returned (``name``) in the response. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.scheduler_v1beta1.types.Job` @@ -511,9 +496,7 @@ def update_job( >>> response = client.update_job(job) Args: - job (Union[dict, ~google.cloud.scheduler_v1beta1.types.Job]): Required. - - The new job properties. ``name`` must be specified. + job (Union[dict, ~google.cloud.scheduler_v1beta1.types.Job]): Required. The new job properties. ``name`` must be specified. Output only fields cannot be modified using UpdateJob. Any value specified for an output only field will be ignored. 
@@ -592,9 +575,7 @@ def delete_job( >>> client.delete_job(name) Args: - name (str): Required. - - The job name. For example: + name (str): Required. The job name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will @@ -666,9 +647,7 @@ def pause_job( >>> response = client.pause_job(name) Args: - name (str): Required. - - The job name. For example: + name (str): Required. The job name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will @@ -743,9 +722,7 @@ def resume_job( >>> response = client.resume_job(name) Args: - name (str): Required. - - The job name. For example: + name (str): Required. The job name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will @@ -818,9 +795,7 @@ def run_job( >>> response = client.run_job(name) Args: - name (str): Required. - - The job name. For example: + name (str): Required. The job name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. 
If ``None`` is specified, requests will diff --git a/scheduler/google/cloud/scheduler_v1beta1/gapic/cloud_scheduler_client_config.py b/scheduler/google/cloud/scheduler_v1beta1/gapic/cloud_scheduler_client_config.py index d972bce85f76..f2dce757b3ec 100644 --- a/scheduler/google/cloud/scheduler_v1beta1/gapic/cloud_scheduler_client_config.py +++ b/scheduler/google/cloud/scheduler_v1beta1/gapic/cloud_scheduler_client_config.py @@ -18,42 +18,42 @@ }, "methods": { "ListJobs": { - "timeout_millis": 10000, + "timeout_millis": 60000, "retry_codes_name": "idempotent", "retry_params_name": "default", }, "GetJob": { - "timeout_millis": 10000, + "timeout_millis": 60000, "retry_codes_name": "idempotent", "retry_params_name": "default", }, "CreateJob": { - "timeout_millis": 10000, + "timeout_millis": 60000, "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, "UpdateJob": { - "timeout_millis": 10000, + "timeout_millis": 60000, "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, "DeleteJob": { - "timeout_millis": 10000, - "retry_codes_name": "idempotent", + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, "PauseJob": { - "timeout_millis": 10000, - "retry_codes_name": "idempotent", + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, "ResumeJob": { - "timeout_millis": 10000, - "retry_codes_name": "idempotent", + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, "RunJob": { - "timeout_millis": 10000, + "timeout_millis": 60000, "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, diff --git a/scheduler/google/cloud/scheduler_v1beta1/proto/cloudscheduler.proto b/scheduler/google/cloud/scheduler_v1beta1/proto/cloudscheduler.proto index 4c1d9661839e..4f86b7a56218 100644 --- a/scheduler/google/cloud/scheduler_v1beta1/proto/cloudscheduler.proto +++ 
b/scheduler/google/cloud/scheduler_v1beta1/proto/cloudscheduler.proto @@ -18,6 +18,8 @@ syntax = "proto3"; package google.cloud.scheduler.v1beta1; import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; import "google/api/resource.proto"; import "google/cloud/scheduler/v1beta1/job.proto"; import "google/protobuf/empty.proto"; @@ -32,11 +34,15 @@ option objc_class_prefix = "SCHEDULER"; // The Cloud Scheduler API allows external entities to reliably // schedule asynchronous jobs. service CloudScheduler { + option (google.api.default_host) = "cloudscheduler.googleapis.com"; + option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; + // Lists jobs. rpc ListJobs(ListJobsRequest) returns (ListJobsResponse) { option (google.api.http) = { get: "/v1beta1/{parent=projects/*/locations/*}/jobs" }; + option (google.api.method_signature) = "parent"; } // Gets a job. @@ -44,6 +50,7 @@ service CloudScheduler { option (google.api.http) = { get: "/v1beta1/{name=projects/*/locations/*/jobs/*}" }; + option (google.api.method_signature) = "name"; } // Creates a job. @@ -52,6 +59,7 @@ service CloudScheduler { post: "/v1beta1/{parent=projects/*/locations/*}/jobs" body: "job" }; + option (google.api.method_signature) = "parent,job"; } // Updates a job. @@ -68,6 +76,7 @@ service CloudScheduler { patch: "/v1beta1/{job.name=projects/*/locations/*/jobs/*}" body: "job" }; + option (google.api.method_signature) = "job,update_mask"; } // Deletes a job. @@ -75,6 +84,7 @@ service CloudScheduler { option (google.api.http) = { delete: "/v1beta1/{name=projects/*/locations/*/jobs/*}" }; + option (google.api.method_signature) = "name"; } // Pauses a job. @@ -89,6 +99,7 @@ service CloudScheduler { post: "/v1beta1/{name=projects/*/locations/*/jobs/*}:pause" body: "*" }; + option (google.api.method_signature) = "name"; } // Resume a job. 
@@ -102,6 +113,7 @@ service CloudScheduler { post: "/v1beta1/{name=projects/*/locations/*/jobs/*}:resume" body: "*" }; + option (google.api.method_signature) = "name"; } // Forces a job to run now. @@ -113,16 +125,20 @@ service CloudScheduler { post: "/v1beta1/{name=projects/*/locations/*/jobs/*}:run" body: "*" }; + option (google.api.method_signature) = "name"; } } // Request message for listing jobs using [ListJobs][google.cloud.scheduler.v1beta1.CloudScheduler.ListJobs]. message ListJobsRequest { - // Required. - // - // The location name. For example: + // Required. The location name. For example: // `projects/PROJECT_ID/locations/LOCATION_ID`. - string parent = 1; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "cloudscheduler.googleapis.com/Job" + } + ]; // Requested page size. // @@ -159,40 +175,42 @@ message ListJobsResponse { // Request message for [GetJob][google.cloud.scheduler.v1beta1.CloudScheduler.GetJob]. message GetJobRequest { - // Required. - // - // The job name. For example: + // Required. The job name. For example: // `projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID`. - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "cloudscheduler.googleapis.com/Job" + } + ]; } // Request message for [CreateJob][google.cloud.scheduler.v1beta1.CloudScheduler.CreateJob]. message CreateJobRequest { - // Required. - // - // The location name. For example: + // Required. The location name. For example: // `projects/PROJECT_ID/locations/LOCATION_ID`. - string parent = 1; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "cloudscheduler.googleapis.com/Job" + } + ]; - // Required. - // - // The job to add. The user can optionally specify a name for the + // Required. The job to add. 
The user can optionally specify a name for the // job in [name][google.cloud.scheduler.v1beta1.Job.name]. [name][google.cloud.scheduler.v1beta1.Job.name] cannot be the same as an // existing job. If a name is not specified then the system will // generate a random unique name that will be returned // ([name][google.cloud.scheduler.v1beta1.Job.name]) in the response. - Job job = 2; + Job job = 2 [(google.api.field_behavior) = REQUIRED]; } // Request message for [UpdateJob][google.cloud.scheduler.v1beta1.CloudScheduler.UpdateJob]. message UpdateJobRequest { - // Required. - // - // The new job properties. [name][google.cloud.scheduler.v1beta1.Job.name] must be specified. + // Required. The new job properties. [name][google.cloud.scheduler.v1beta1.Job.name] must be specified. // // Output only fields cannot be modified using UpdateJob. // Any value specified for an output only field will be ignored. - Job job = 1; + Job job = 1 [(google.api.field_behavior) = REQUIRED]; // A mask used to specify which fields of the job are being updated. google.protobuf.FieldMask update_mask = 2; @@ -201,37 +219,49 @@ message UpdateJobRequest { // Request message for deleting a job using // [DeleteJob][google.cloud.scheduler.v1beta1.CloudScheduler.DeleteJob]. message DeleteJobRequest { - // Required. - // - // The job name. For example: + // Required. The job name. For example: // `projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID`. - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "cloudscheduler.googleapis.com/Job" + } + ]; } // Request message for [PauseJob][google.cloud.scheduler.v1beta1.CloudScheduler.PauseJob]. message PauseJobRequest { - // Required. - // - // The job name. For example: + // Required. The job name. For example: // `projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID`. 
- string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "cloudscheduler.googleapis.com/Job" + } + ]; } // Request message for [ResumeJob][google.cloud.scheduler.v1beta1.CloudScheduler.ResumeJob]. message ResumeJobRequest { - // Required. - // - // The job name. For example: + // Required. The job name. For example: // `projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID`. - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "cloudscheduler.googleapis.com/Job" + } + ]; } // Request message for forcing a job to run now using // [RunJob][google.cloud.scheduler.v1beta1.CloudScheduler.RunJob]. message RunJobRequest { - // Required. - // - // The job name. For example: + // Required. The job name. For example: // `projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID`. - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "cloudscheduler.googleapis.com/Job" + } + ]; } diff --git a/scheduler/google/cloud/scheduler_v1beta1/proto/cloudscheduler_pb2.py b/scheduler/google/cloud/scheduler_v1beta1/proto/cloudscheduler_pb2.py index f85e45704dc3..3ce738ced993 100644 --- a/scheduler/google/cloud/scheduler_v1beta1/proto/cloudscheduler_pb2.py +++ b/scheduler/google/cloud/scheduler_v1beta1/proto/cloudscheduler_pb2.py @@ -16,6 +16,8 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.cloud.scheduler_v1beta1.proto import ( job_pb2 as google_dot_cloud_dot_scheduler__v1beta1_dot_proto_dot_job__pb2, @@ -32,10 +34,12 @@ 
'\n"com.google.cloud.scheduler.v1beta1B\016SchedulerProtoP\001ZGgoogle.golang.org/genproto/googleapis/cloud/scheduler/v1beta1;scheduler\242\002\tSCHEDULER' ), serialized_pb=_b( - '\n9google/cloud/scheduler_v1beta1/proto/cloudscheduler.proto\x12\x1egoogle.cloud.scheduler.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x19google/api/resource.proto\x1a.google/cloud/scheduler_v1beta1/proto/job.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto"H\n\x0fListJobsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x05 \x01(\x05\x12\x12\n\npage_token\x18\x06 \x01(\t"^\n\x10ListJobsResponse\x12\x31\n\x04jobs\x18\x01 \x03(\x0b\x32#.google.cloud.scheduler.v1beta1.Job\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\x1d\n\rGetJobRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"T\n\x10\x43reateJobRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x30\n\x03job\x18\x02 \x01(\x0b\x32#.google.cloud.scheduler.v1beta1.Job"u\n\x10UpdateJobRequest\x12\x30\n\x03job\x18\x01 \x01(\x0b\x32#.google.cloud.scheduler.v1beta1.Job\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask" \n\x10\x44\x65leteJobRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"\x1f\n\x0fPauseJobRequest\x12\x0c\n\x04name\x18\x01 \x01(\t" \n\x10ResumeJobRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"\x1d\n\rRunJobRequest\x12\x0c\n\x04name\x18\x01 
\x01(\t2\x88\n\n\x0e\x43loudScheduler\x12\xa4\x01\n\x08ListJobs\x12/.google.cloud.scheduler.v1beta1.ListJobsRequest\x1a\x30.google.cloud.scheduler.v1beta1.ListJobsResponse"5\x82\xd3\xe4\x93\x02/\x12-/v1beta1/{parent=projects/*/locations/*}/jobs\x12\x93\x01\n\x06GetJob\x12-.google.cloud.scheduler.v1beta1.GetJobRequest\x1a#.google.cloud.scheduler.v1beta1.Job"5\x82\xd3\xe4\x93\x02/\x12-/v1beta1/{name=projects/*/locations/*/jobs/*}\x12\x9e\x01\n\tCreateJob\x12\x30.google.cloud.scheduler.v1beta1.CreateJobRequest\x1a#.google.cloud.scheduler.v1beta1.Job":\x82\xd3\xe4\x93\x02\x34"-/v1beta1/{parent=projects/*/locations/*}/jobs:\x03job\x12\xa2\x01\n\tUpdateJob\x12\x30.google.cloud.scheduler.v1beta1.UpdateJobRequest\x1a#.google.cloud.scheduler.v1beta1.Job">\x82\xd3\xe4\x93\x02\x38\x32\x31/v1beta1/{job.name=projects/*/locations/*/jobs/*}:\x03job\x12\x8c\x01\n\tDeleteJob\x12\x30.google.cloud.scheduler.v1beta1.DeleteJobRequest\x1a\x16.google.protobuf.Empty"5\x82\xd3\xe4\x93\x02/*-/v1beta1/{name=projects/*/locations/*/jobs/*}\x12\xa0\x01\n\x08PauseJob\x12/.google.cloud.scheduler.v1beta1.PauseJobRequest\x1a#.google.cloud.scheduler.v1beta1.Job">\x82\xd3\xe4\x93\x02\x38"3/v1beta1/{name=projects/*/locations/*/jobs/*}:pause:\x01*\x12\xa3\x01\n\tResumeJob\x12\x30.google.cloud.scheduler.v1beta1.ResumeJobRequest\x1a#.google.cloud.scheduler.v1beta1.Job"?\x82\xd3\xe4\x93\x02\x39"4/v1beta1/{name=projects/*/locations/*/jobs/*}:resume:\x01*\x12\x9a\x01\n\x06RunJob\x12-.google.cloud.scheduler.v1beta1.RunJobRequest\x1a#.google.cloud.scheduler.v1beta1.Job"<\x82\xd3\xe4\x93\x02\x36"1/v1beta1/{name=projects/*/locations/*/jobs/*}:run:\x01*B\x8b\x01\n"com.google.cloud.scheduler.v1beta1B\x0eSchedulerProtoP\x01ZGgoogle.golang.org/genproto/googleapis/cloud/scheduler/v1beta1;scheduler\xa2\x02\tSCHEDULERb\x06proto3' + 
'\n9google/cloud/scheduler_v1beta1/proto/cloudscheduler.proto\x12\x1egoogle.cloud.scheduler.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a.google/cloud/scheduler_v1beta1/proto/job.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto"s\n\x0fListJobsRequest\x12\x39\n\x06parent\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\x12!cloudscheduler.googleapis.com/Job\x12\x11\n\tpage_size\x18\x05 \x01(\x05\x12\x12\n\npage_token\x18\x06 \x01(\t"^\n\x10ListJobsResponse\x12\x31\n\x04jobs\x18\x01 \x03(\x0b\x32#.google.cloud.scheduler.v1beta1.Job\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"H\n\rGetJobRequest\x12\x37\n\x04name\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!cloudscheduler.googleapis.com/Job"\x84\x01\n\x10\x43reateJobRequest\x12\x39\n\x06parent\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\x12!cloudscheduler.googleapis.com/Job\x12\x35\n\x03job\x18\x02 \x01(\x0b\x32#.google.cloud.scheduler.v1beta1.JobB\x03\xe0\x41\x02"z\n\x10UpdateJobRequest\x12\x35\n\x03job\x18\x01 \x01(\x0b\x32#.google.cloud.scheduler.v1beta1.JobB\x03\xe0\x41\x02\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"K\n\x10\x44\x65leteJobRequest\x12\x37\n\x04name\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!cloudscheduler.googleapis.com/Job"J\n\x0fPauseJobRequest\x12\x37\n\x04name\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!cloudscheduler.googleapis.com/Job"K\n\x10ResumeJobRequest\x12\x37\n\x04name\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!cloudscheduler.googleapis.com/Job"H\n\rRunJobRequest\x12\x37\n\x04name\x18\x01 
\x01(\tB)\xe0\x41\x02\xfa\x41#\n!cloudscheduler.googleapis.com/Job2\xa6\x0b\n\x0e\x43loudScheduler\x12\xad\x01\n\x08ListJobs\x12/.google.cloud.scheduler.v1beta1.ListJobsRequest\x1a\x30.google.cloud.scheduler.v1beta1.ListJobsResponse">\x82\xd3\xe4\x93\x02/\x12-/v1beta1/{parent=projects/*/locations/*}/jobs\xda\x41\x06parent\x12\x9a\x01\n\x06GetJob\x12-.google.cloud.scheduler.v1beta1.GetJobRequest\x1a#.google.cloud.scheduler.v1beta1.Job"<\x82\xd3\xe4\x93\x02/\x12-/v1beta1/{name=projects/*/locations/*/jobs/*}\xda\x41\x04name\x12\xab\x01\n\tCreateJob\x12\x30.google.cloud.scheduler.v1beta1.CreateJobRequest\x1a#.google.cloud.scheduler.v1beta1.Job"G\x82\xd3\xe4\x93\x02\x34"-/v1beta1/{parent=projects/*/locations/*}/jobs:\x03job\xda\x41\nparent,job\x12\xb4\x01\n\tUpdateJob\x12\x30.google.cloud.scheduler.v1beta1.UpdateJobRequest\x1a#.google.cloud.scheduler.v1beta1.Job"P\x82\xd3\xe4\x93\x02\x38\x32\x31/v1beta1/{job.name=projects/*/locations/*/jobs/*}:\x03job\xda\x41\x0fjob,update_mask\x12\x93\x01\n\tDeleteJob\x12\x30.google.cloud.scheduler.v1beta1.DeleteJobRequest\x1a\x16.google.protobuf.Empty"<\x82\xd3\xe4\x93\x02/*-/v1beta1/{name=projects/*/locations/*/jobs/*}\xda\x41\x04name\x12\xa7\x01\n\x08PauseJob\x12/.google.cloud.scheduler.v1beta1.PauseJobRequest\x1a#.google.cloud.scheduler.v1beta1.Job"E\x82\xd3\xe4\x93\x02\x38"3/v1beta1/{name=projects/*/locations/*/jobs/*}:pause:\x01*\xda\x41\x04name\x12\xaa\x01\n\tResumeJob\x12\x30.google.cloud.scheduler.v1beta1.ResumeJobRequest\x1a#.google.cloud.scheduler.v1beta1.Job"F\x82\xd3\xe4\x93\x02\x39"4/v1beta1/{name=projects/*/locations/*/jobs/*}:resume:\x01*\xda\x41\x04name\x12\xa1\x01\n\x06RunJob\x12-.google.cloud.scheduler.v1beta1.RunJobRequest\x1a#.google.cloud.scheduler.v1beta1.Job"C\x82\xd3\xe4\x93\x02\x36"1/v1beta1/{name=projects/*/locations/*/jobs/*}:run:\x01*\xda\x41\x04name\x1aQ\xca\x41\x1d\x63loudscheduler.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB\x8b\x01\n"com.google.cloud.scheduler.v1beta1B\x0eSchedu
lerProtoP\x01ZGgoogle.golang.org/genproto/googleapis/cloud/scheduler/v1beta1;scheduler\xa2\x02\tSCHEDULERb\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, google_dot_api_dot_resource__pb2.DESCRIPTOR, google_dot_cloud_dot_scheduler__v1beta1_dot_proto_dot_job__pb2.DESCRIPTOR, google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, @@ -66,7 +70,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A#\022!cloudscheduler.googleapis.com/Job" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -114,8 +120,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=261, - serialized_end=333, + serialized_start=319, + serialized_end=434, ) @@ -171,8 +177,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=335, - serialized_end=429, + serialized_start=436, + serialized_end=530, ) @@ -198,7 +204,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A#\n!cloudscheduler.googleapis.com/Job" + ), file=DESCRIPTOR, ) ], @@ -210,8 +218,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=431, - serialized_end=460, + serialized_start=532, + serialized_end=604, ) @@ -237,7 +245,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A#\022!cloudscheduler.googleapis.com/Job" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -255,7 +265,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -267,8 +277,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=462, - serialized_end=546, + serialized_start=607, + serialized_end=739, ) @@ -294,7 
+304,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -324,8 +334,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=548, - serialized_end=665, + serialized_start=741, + serialized_end=863, ) @@ -351,7 +361,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A#\n!cloudscheduler.googleapis.com/Job" + ), file=DESCRIPTOR, ) ], @@ -363,8 +375,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=667, - serialized_end=699, + serialized_start=865, + serialized_end=940, ) @@ -390,7 +402,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A#\n!cloudscheduler.googleapis.com/Job" + ), file=DESCRIPTOR, ) ], @@ -402,8 +416,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=701, - serialized_end=732, + serialized_start=942, + serialized_end=1016, ) @@ -429,7 +443,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A#\n!cloudscheduler.googleapis.com/Job" + ), file=DESCRIPTOR, ) ], @@ -441,8 +457,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=734, - serialized_end=766, + serialized_start=1018, + serialized_end=1093, ) @@ -468,7 +484,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A#\n!cloudscheduler.googleapis.com/Job" + ), file=DESCRIPTOR, ) ], @@ -480,8 +498,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=768, - serialized_end=797, + serialized_start=1095, + serialized_end=1167, ) _LISTJOBSRESPONSE.fields_by_name[ @@ -519,7 +537,7 @@ Attributes: parent: - Required. The location name. 
For example: + Required. The location name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID``. page_size: Requested page size. The maximum page size is 500. If @@ -582,7 +600,7 @@ Attributes: name: - Required. The job name. For example: + Required. The job name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. """, # @@protoc_insertion_point(class_scope:google.cloud.scheduler.v1beta1.GetJobRequest) @@ -602,10 +620,10 @@ Attributes: parent: - Required. The location name. For example: + Required. The location name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID``. job: - Required. The job to add. The user can optionally specify a + Required. The job to add. The user can optionally specify a name for the job in [name][google.cloud.scheduler.v1beta1.Job.name]. [name][google.cloud.scheduler.v1beta1.Job.name] cannot be the @@ -631,7 +649,7 @@ Attributes: job: - Required. The new job properties. + Required. The new job properties. [name][google.cloud.scheduler.v1beta1.Job.name] must be specified. Output only fields cannot be modified using UpdateJob. Any value specified for an output only field will @@ -657,7 +675,7 @@ Attributes: name: - Required. The job name. For example: + Required. The job name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. """, # @@protoc_insertion_point(class_scope:google.cloud.scheduler.v1beta1.DeleteJobRequest) @@ -677,7 +695,7 @@ Attributes: name: - Required. The job name. For example: + Required. The job name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. """, # @@protoc_insertion_point(class_scope:google.cloud.scheduler.v1beta1.PauseJobRequest) @@ -697,7 +715,7 @@ Attributes: name: - Required. The job name. For example: + Required. The job name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. 
""", # @@protoc_insertion_point(class_scope:google.cloud.scheduler.v1beta1.ResumeJobRequest) @@ -717,7 +735,7 @@ Attributes: name: - Required. The job name. For example: + Required. The job name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. """, # @@protoc_insertion_point(class_scope:google.cloud.scheduler.v1beta1.RunJobRequest) @@ -727,15 +745,26 @@ DESCRIPTOR._options = None +_LISTJOBSREQUEST.fields_by_name["parent"]._options = None +_GETJOBREQUEST.fields_by_name["name"]._options = None +_CREATEJOBREQUEST.fields_by_name["parent"]._options = None +_CREATEJOBREQUEST.fields_by_name["job"]._options = None +_UPDATEJOBREQUEST.fields_by_name["job"]._options = None +_DELETEJOBREQUEST.fields_by_name["name"]._options = None +_PAUSEJOBREQUEST.fields_by_name["name"]._options = None +_RESUMEJOBREQUEST.fields_by_name["name"]._options = None +_RUNJOBREQUEST.fields_by_name["name"]._options = None _CLOUDSCHEDULER = _descriptor.ServiceDescriptor( name="CloudScheduler", full_name="google.cloud.scheduler.v1beta1.CloudScheduler", file=DESCRIPTOR, index=0, - serialized_options=None, - serialized_start=800, - serialized_end=2088, + serialized_options=_b( + "\312A\035cloudscheduler.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" + ), + serialized_start=1170, + serialized_end=2616, methods=[ _descriptor.MethodDescriptor( name="ListJobs", @@ -745,7 +774,7 @@ input_type=_LISTJOBSREQUEST, output_type=_LISTJOBSRESPONSE, serialized_options=_b( - "\202\323\344\223\002/\022-/v1beta1/{parent=projects/*/locations/*}/jobs" + "\202\323\344\223\002/\022-/v1beta1/{parent=projects/*/locations/*}/jobs\332A\006parent" ), ), _descriptor.MethodDescriptor( @@ -756,7 +785,7 @@ input_type=_GETJOBREQUEST, output_type=google_dot_cloud_dot_scheduler__v1beta1_dot_proto_dot_job__pb2._JOB, serialized_options=_b( - "\202\323\344\223\002/\022-/v1beta1/{name=projects/*/locations/*/jobs/*}" + 
"\202\323\344\223\002/\022-/v1beta1/{name=projects/*/locations/*/jobs/*}\332A\004name" ), ), _descriptor.MethodDescriptor( @@ -767,7 +796,7 @@ input_type=_CREATEJOBREQUEST, output_type=google_dot_cloud_dot_scheduler__v1beta1_dot_proto_dot_job__pb2._JOB, serialized_options=_b( - '\202\323\344\223\0024"-/v1beta1/{parent=projects/*/locations/*}/jobs:\003job' + '\202\323\344\223\0024"-/v1beta1/{parent=projects/*/locations/*}/jobs:\003job\332A\nparent,job' ), ), _descriptor.MethodDescriptor( @@ -778,7 +807,7 @@ input_type=_UPDATEJOBREQUEST, output_type=google_dot_cloud_dot_scheduler__v1beta1_dot_proto_dot_job__pb2._JOB, serialized_options=_b( - "\202\323\344\223\002821/v1beta1/{job.name=projects/*/locations/*/jobs/*}:\003job" + "\202\323\344\223\002821/v1beta1/{job.name=projects/*/locations/*/jobs/*}:\003job\332A\017job,update_mask" ), ), _descriptor.MethodDescriptor( @@ -789,7 +818,7 @@ input_type=_DELETEJOBREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, serialized_options=_b( - "\202\323\344\223\002/*-/v1beta1/{name=projects/*/locations/*/jobs/*}" + "\202\323\344\223\002/*-/v1beta1/{name=projects/*/locations/*/jobs/*}\332A\004name" ), ), _descriptor.MethodDescriptor( @@ -800,7 +829,7 @@ input_type=_PAUSEJOBREQUEST, output_type=google_dot_cloud_dot_scheduler__v1beta1_dot_proto_dot_job__pb2._JOB, serialized_options=_b( - '\202\323\344\223\0028"3/v1beta1/{name=projects/*/locations/*/jobs/*}:pause:\001*' + '\202\323\344\223\0028"3/v1beta1/{name=projects/*/locations/*/jobs/*}:pause:\001*\332A\004name' ), ), _descriptor.MethodDescriptor( @@ -811,7 +840,7 @@ input_type=_RESUMEJOBREQUEST, output_type=google_dot_cloud_dot_scheduler__v1beta1_dot_proto_dot_job__pb2._JOB, serialized_options=_b( - '\202\323\344\223\0029"4/v1beta1/{name=projects/*/locations/*/jobs/*}:resume:\001*' + '\202\323\344\223\0029"4/v1beta1/{name=projects/*/locations/*/jobs/*}:resume:\001*\332A\004name' ), ), _descriptor.MethodDescriptor( @@ -822,7 +851,7 @@ input_type=_RUNJOBREQUEST, 
output_type=google_dot_cloud_dot_scheduler__v1beta1_dot_proto_dot_job__pb2._JOB, serialized_options=_b( - '\202\323\344\223\0026"1/v1beta1/{name=projects/*/locations/*/jobs/*}:run:\001*' + '\202\323\344\223\0026"1/v1beta1/{name=projects/*/locations/*/jobs/*}:run:\001*\332A\004name' ), ), ], diff --git a/scheduler/google/cloud/scheduler_v1beta1/proto/job.proto b/scheduler/google/cloud/scheduler_v1beta1/proto/job.proto index ddf910b03384..ddfda31eddc2 100644 --- a/scheduler/google/cloud/scheduler_v1beta1/proto/job.proto +++ b/scheduler/google/cloud/scheduler_v1beta1/proto/job.proto @@ -17,12 +17,12 @@ syntax = "proto3"; package google.cloud.scheduler.v1beta1; -import "google/api/annotations.proto"; import "google/api/resource.proto"; import "google/cloud/scheduler/v1beta1/target.proto"; import "google/protobuf/duration.proto"; import "google/protobuf/timestamp.proto"; import "google/rpc/status.proto"; +import "google/api/annotations.proto"; option go_package = "google.golang.org/genproto/googleapis/cloud/scheduler/v1beta1;scheduler"; option java_multiple_files = true; @@ -32,6 +32,11 @@ option java_package = "com.google.cloud.scheduler.v1beta1"; // Configuration for a job. // The maximum allowed size for a job is 100KB. message Job { + option (google.api.resource) = { + type: "cloudscheduler.googleapis.com/Job" + pattern: "projects/{project}/locations/{location}/jobs/{job}" + }; + // State of the job. enum State { // Unspecified state. 
diff --git a/scheduler/google/cloud/scheduler_v1beta1/proto/job_pb2.py b/scheduler/google/cloud/scheduler_v1beta1/proto/job_pb2.py index 0536ae370635..980631b89ea8 100644 --- a/scheduler/google/cloud/scheduler_v1beta1/proto/job_pb2.py +++ b/scheduler/google/cloud/scheduler_v1beta1/proto/job_pb2.py @@ -15,7 +15,6 @@ _sym_db = _symbol_database.Default() -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.cloud.scheduler_v1beta1.proto import ( target_pb2 as google_dot_cloud_dot_scheduler__v1beta1_dot_proto_dot_target__pb2, @@ -23,6 +22,7 @@ from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2 +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 DESCRIPTOR = _descriptor.FileDescriptor( @@ -33,15 +33,15 @@ '\n"com.google.cloud.scheduler.v1beta1B\010JobProtoP\001ZGgoogle.golang.org/genproto/googleapis/cloud/scheduler/v1beta1;scheduler' ), serialized_pb=_b( - '\n.google/cloud/scheduler_v1beta1/proto/job.proto\x12\x1egoogle.cloud.scheduler.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x19google/api/resource.proto\x1a\x31google/cloud/scheduler_v1beta1/proto/target.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto"\x88\x06\n\x03Job\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x45\n\rpubsub_target\x18\x04 \x01(\x0b\x32,.google.cloud.scheduler.v1beta1.PubsubTargetH\x00\x12U\n\x16\x61pp_engine_http_target\x18\x05 \x01(\x0b\x32\x33.google.cloud.scheduler.v1beta1.AppEngineHttpTargetH\x00\x12\x41\n\x0bhttp_target\x18\x06 \x01(\x0b\x32*.google.cloud.scheduler.v1beta1.HttpTargetH\x00\x12\x10\n\x08schedule\x18\x14 \x01(\t\x12\x11\n\ttime_zone\x18\x15 
\x01(\t\x12\x34\n\x10user_update_time\x18\t \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x38\n\x05state\x18\n \x01(\x0e\x32).google.cloud.scheduler.v1beta1.Job.State\x12"\n\x06status\x18\x0b \x01(\x0b\x32\x12.google.rpc.Status\x12\x31\n\rschedule_time\x18\x11 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x35\n\x11last_attempt_time\x18\x12 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x41\n\x0cretry_config\x18\x13 \x01(\x0b\x32+.google.cloud.scheduler.v1beta1.RetryConfig\x12\x33\n\x10\x61ttempt_deadline\x18\x16 \x01(\x0b\x32\x19.google.protobuf.Duration"X\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07\x45NABLED\x10\x01\x12\n\n\x06PAUSED\x10\x02\x12\x0c\n\x08\x44ISABLED\x10\x03\x12\x11\n\rUPDATE_FAILED\x10\x04\x42\x08\n\x06target"\xe2\x01\n\x0bRetryConfig\x12\x13\n\x0bretry_count\x18\x01 \x01(\x05\x12\x35\n\x12max_retry_duration\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x37\n\x14min_backoff_duration\x18\x03 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x37\n\x14max_backoff_duration\x18\x04 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x15\n\rmax_doublings\x18\x05 \x01(\x05\x42y\n"com.google.cloud.scheduler.v1beta1B\x08JobProtoP\x01ZGgoogle.golang.org/genproto/googleapis/cloud/scheduler/v1beta1;schedulerb\x06proto3' + '\n.google/cloud/scheduler_v1beta1/proto/job.proto\x12\x1egoogle.cloud.scheduler.v1beta1\x1a\x19google/api/resource.proto\x1a\x31google/cloud/scheduler_v1beta1/proto/target.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto\x1a\x1cgoogle/api/annotations.proto"\xe4\x06\n\x03Job\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x45\n\rpubsub_target\x18\x04 \x01(\x0b\x32,.google.cloud.scheduler.v1beta1.PubsubTargetH\x00\x12U\n\x16\x61pp_engine_http_target\x18\x05 \x01(\x0b\x32\x33.google.cloud.scheduler.v1beta1.AppEngineHttpTargetH\x00\x12\x41\n\x0bhttp_target\x18\x06 
\x01(\x0b\x32*.google.cloud.scheduler.v1beta1.HttpTargetH\x00\x12\x10\n\x08schedule\x18\x14 \x01(\t\x12\x11\n\ttime_zone\x18\x15 \x01(\t\x12\x34\n\x10user_update_time\x18\t \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x38\n\x05state\x18\n \x01(\x0e\x32).google.cloud.scheduler.v1beta1.Job.State\x12"\n\x06status\x18\x0b \x01(\x0b\x32\x12.google.rpc.Status\x12\x31\n\rschedule_time\x18\x11 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x35\n\x11last_attempt_time\x18\x12 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x41\n\x0cretry_config\x18\x13 \x01(\x0b\x32+.google.cloud.scheduler.v1beta1.RetryConfig\x12\x33\n\x10\x61ttempt_deadline\x18\x16 \x01(\x0b\x32\x19.google.protobuf.Duration"X\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07\x45NABLED\x10\x01\x12\n\n\x06PAUSED\x10\x02\x12\x0c\n\x08\x44ISABLED\x10\x03\x12\x11\n\rUPDATE_FAILED\x10\x04:Z\xea\x41W\n!cloudscheduler.googleapis.com/Job\x12\x32projects/{project}/locations/{location}/jobs/{job}B\x08\n\x06target"\xe2\x01\n\x0bRetryConfig\x12\x13\n\x0bretry_count\x18\x01 \x01(\x05\x12\x35\n\x12max_retry_duration\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x37\n\x14min_backoff_duration\x18\x03 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x37\n\x14max_backoff_duration\x18\x04 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x15\n\rmax_doublings\x18\x05 \x01(\x05\x42y\n"com.google.cloud.scheduler.v1beta1B\x08JobProtoP\x01ZGgoogle.golang.org/genproto/googleapis/cloud/scheduler/v1beta1;schedulerb\x06proto3' ), dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, google_dot_api_dot_resource__pb2.DESCRIPTOR, google_dot_cloud_dot_scheduler__v1beta1_dot_proto_dot_target__pb2.DESCRIPTOR, google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, google_dot_rpc_dot_status__pb2.DESCRIPTOR, + google_dot_api_dot_annotations__pb2.DESCRIPTOR, ], ) @@ -343,7 +343,9 @@ extensions=[], nested_types=[], enum_types=[_JOB_STATE], - serialized_options=None, + 
serialized_options=_b( + "\352AW\n!cloudscheduler.googleapis.com/Job\0222projects/{project}/locations/{location}/jobs/{job}" + ), is_extendable=False, syntax="proto3", extension_ranges=[], @@ -357,7 +359,7 @@ ) ], serialized_start=281, - serialized_end=1057, + serialized_end=1149, ) @@ -467,8 +469,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1060, - serialized_end=1286, + serialized_start=1152, + serialized_end=1378, ) _JOB.fields_by_name[ @@ -703,4 +705,5 @@ DESCRIPTOR._options = None +_JOB._options = None # @@protoc_insertion_point(module_scope) diff --git a/scheduler/google/cloud/scheduler_v1beta1/proto/target.proto b/scheduler/google/cloud/scheduler_v1beta1/proto/target.proto index 3bb44a1fb85d..4b47e356768b 100644 --- a/scheduler/google/cloud/scheduler_v1beta1/proto/target.proto +++ b/scheduler/google/cloud/scheduler_v1beta1/proto/target.proto @@ -17,8 +17,8 @@ syntax = "proto3"; package google.cloud.scheduler.v1beta1; +import "google/api/resource.proto"; import "google/api/annotations.proto"; -import "google/protobuf/any.proto"; option go_package = "google.golang.org/genproto/googleapis/cloud/scheduler/v1beta1;scheduler"; option java_multiple_files = true; @@ -32,9 +32,7 @@ option java_package = "com.google.cloud.scheduler.v1beta1"; // constitutes a failed execution. For a redirected request, the response // returned by the redirected request is considered. message HttpTarget { - // Required. - // - // The full URI path that the request will be sent to. This string + // Required. The full URI path that the request will be sent to. This string // must begin with either "http://" or "https://". Some examples of // valid values for [uri][google.cloud.scheduler.v1beta1.HttpTarget.uri] are: // `http://acme.com` and `https://acme.com/sales:8080`. Cloud Scheduler will @@ -77,8 +75,8 @@ message HttpTarget { // will be generated and attached as an `Authorization` header in the HTTP // request. 
// - // This type of authorization should be used when sending requests to a GCP - // endpoint. + // This type of authorization should generally only be used when calling + // Google APIs hosted on *.googleapis.com. OAuthToken oauth_token = 5; // If specified, an @@ -86,8 +84,9 @@ message HttpTarget { // token will be generated and attached as an `Authorization` header in the // HTTP request. // - // This type of authorization should be used when sending requests to third - // party endpoints. + // This type of authorization can be used for many scenarios, including + // calling Cloud Run, or endpoints where you intend to validate the token + // yourself. OidcToken oidc_token = 6; } } @@ -162,16 +161,16 @@ message AppEngineHttpTarget { // Pub/Sub target. The job will be delivered by publishing a message to // the given Pub/Sub topic. message PubsubTarget { - // Required. - // - // The name of the Cloud Pub/Sub topic to which messages will + // Required. The name of the Cloud Pub/Sub topic to which messages will // be published when a job is delivered. The topic name must be in the // same format as required by PubSub's // [PublishRequest.name](https://cloud.google.com/pubsub/docs/reference/rpc/google.pubsub.v1#publishrequest), // for example `projects/PROJECT_ID/topics/TOPIC_ID`. // // The topic must be in the same project as the Cloud Scheduler job. - string topic_name = 1; + string topic_name = 1 [(google.api.resource_reference) = { + type: "pubsub.googleapis.com/Topic" + }]; // The message payload for PubsubMessage. // @@ -315,8 +314,8 @@ enum HttpMethod { // Contains information needed for generating an // [OAuth token](https://developers.google.com/identity/protocols/OAuth2). -// This type of authorization should be used when sending requests to a GCP -// endpoint. +// This type of authorization should generally only be used when calling Google +// APIs hosted on *.googleapis.com. 
message OAuthToken { // [Service account email](https://cloud.google.com/iam/docs/service-accounts) // to be used for generating OAuth token. @@ -332,9 +331,10 @@ message OAuthToken { // Contains information needed for generating an // [OpenID Connect -// token](https://developers.google.com/identity/protocols/OpenIDConnect). This -// type of authorization should be used when sending requests to third party -// endpoints. +// token](https://developers.google.com/identity/protocols/OpenIDConnect). +// This type of authorization can be used for many scenarios, including +// calling Cloud Run, or endpoints where you intend to validate the token +// yourself. message OidcToken { // [Service account email](https://cloud.google.com/iam/docs/service-accounts) // to be used for generating OIDC token. @@ -346,3 +346,11 @@ message OidcToken { // specified in target will be used. string audience = 2; } + +// The Pub/Sub Topic resource definition is in google/cloud/pubsub/v1/, +// but we do not import that proto directly; therefore, we redefine the +// pattern here. 
+option (google.api.resource_definition) = { + type: "pubsub.googleapis.com/Topic" + pattern: "projects/{project}/topics/{topic}" +}; diff --git a/scheduler/google/cloud/scheduler_v1beta1/proto/target_pb2.py b/scheduler/google/cloud/scheduler_v1beta1/proto/target_pb2.py index f44a902f973d..e1bb923a9507 100644 --- a/scheduler/google/cloud/scheduler_v1beta1/proto/target_pb2.py +++ b/scheduler/google/cloud/scheduler_v1beta1/proto/target_pb2.py @@ -16,8 +16,8 @@ _sym_db = _symbol_database.Default() +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 DESCRIPTOR = _descriptor.FileDescriptor( @@ -25,14 +25,14 @@ package="google.cloud.scheduler.v1beta1", syntax="proto3", serialized_options=_b( - '\n"com.google.cloud.scheduler.v1beta1B\013TargetProtoP\001ZGgoogle.golang.org/genproto/googleapis/cloud/scheduler/v1beta1;scheduler' + '\n"com.google.cloud.scheduler.v1beta1B\013TargetProtoP\001ZGgoogle.golang.org/genproto/googleapis/cloud/scheduler/v1beta1;scheduler\352A@\n\033pubsub.googleapis.com/Topic\022!projects/{project}/topics/{topic}' ), serialized_pb=_b( - '\n1google/cloud/scheduler_v1beta1/proto/target.proto\x12\x1egoogle.cloud.scheduler.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x19google/protobuf/any.proto"\xfe\x02\n\nHttpTarget\x12\x0b\n\x03uri\x18\x01 \x01(\t\x12?\n\x0bhttp_method\x18\x02 \x01(\x0e\x32*.google.cloud.scheduler.v1beta1.HttpMethod\x12H\n\x07headers\x18\x03 \x03(\x0b\x32\x37.google.cloud.scheduler.v1beta1.HttpTarget.HeadersEntry\x12\x0c\n\x04\x62ody\x18\x04 \x01(\x0c\x12\x41\n\x0boauth_token\x18\x05 \x01(\x0b\x32*.google.cloud.scheduler.v1beta1.OAuthTokenH\x00\x12?\n\noidc_token\x18\x06 \x01(\x0b\x32).google.cloud.scheduler.v1beta1.OidcTokenH\x00\x1a.\n\x0cHeadersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01\x42\x16\n\x14\x61uthorization_header"\xcb\x02\n\x13\x41ppEngineHttpTarget\x12?\n\x0bhttp_method\x18\x01 \x01(\x0e\x32*.google.cloud.scheduler.v1beta1.HttpMethod\x12L\n\x12\x61pp_engine_routing\x18\x02 \x01(\x0b\x32\x30.google.cloud.scheduler.v1beta1.AppEngineRouting\x12\x14\n\x0crelative_uri\x18\x03 \x01(\t\x12Q\n\x07headers\x18\x04 \x03(\x0b\x32@.google.cloud.scheduler.v1beta1.AppEngineHttpTarget.HeadersEntry\x12\x0c\n\x04\x62ody\x18\x05 \x01(\x0c\x1a.\n\x0cHeadersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xb5\x01\n\x0cPubsubTarget\x12\x12\n\ntopic_name\x18\x01 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x03 \x01(\x0c\x12P\n\nattributes\x18\x04 \x03(\x0b\x32<.google.cloud.scheduler.v1beta1.PubsubTarget.AttributesEntry\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"T\n\x10\x41ppEngineRouting\x12\x0f\n\x07service\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\x12\x10\n\x08instance\x18\x03 \x01(\t\x12\x0c\n\x04host\x18\x04 \x01(\t":\n\nOAuthToken\x12\x1d\n\x15service_account_email\x18\x01 \x01(\t\x12\r\n\x05scope\x18\x02 \x01(\t"<\n\tOidcToken\x12\x1d\n\x15service_account_email\x18\x01 \x01(\t\x12\x10\n\x08\x61udience\x18\x02 \x01(\t*s\n\nHttpMethod\x12\x1b\n\x17HTTP_METHOD_UNSPECIFIED\x10\x00\x12\x08\n\x04POST\x10\x01\x12\x07\n\x03GET\x10\x02\x12\x08\n\x04HEAD\x10\x03\x12\x07\n\x03PUT\x10\x04\x12\n\n\x06\x44\x45LETE\x10\x05\x12\t\n\x05PATCH\x10\x06\x12\x0b\n\x07OPTIONS\x10\x07\x42|\n"com.google.cloud.scheduler.v1beta1B\x0bTargetProtoP\x01ZGgoogle.golang.org/genproto/googleapis/cloud/scheduler/v1beta1;schedulerb\x06proto3' + '\n1google/cloud/scheduler_v1beta1/proto/target.proto\x12\x1egoogle.cloud.scheduler.v1beta1\x1a\x19google/api/resource.proto\x1a\x1cgoogle/api/annotations.proto"\xfe\x02\n\nHttpTarget\x12\x0b\n\x03uri\x18\x01 \x01(\t\x12?\n\x0bhttp_method\x18\x02 
\x01(\x0e\x32*.google.cloud.scheduler.v1beta1.HttpMethod\x12H\n\x07headers\x18\x03 \x03(\x0b\x32\x37.google.cloud.scheduler.v1beta1.HttpTarget.HeadersEntry\x12\x0c\n\x04\x62ody\x18\x04 \x01(\x0c\x12\x41\n\x0boauth_token\x18\x05 \x01(\x0b\x32*.google.cloud.scheduler.v1beta1.OAuthTokenH\x00\x12?\n\noidc_token\x18\x06 \x01(\x0b\x32).google.cloud.scheduler.v1beta1.OidcTokenH\x00\x1a.\n\x0cHeadersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x16\n\x14\x61uthorization_header"\xcb\x02\n\x13\x41ppEngineHttpTarget\x12?\n\x0bhttp_method\x18\x01 \x01(\x0e\x32*.google.cloud.scheduler.v1beta1.HttpMethod\x12L\n\x12\x61pp_engine_routing\x18\x02 \x01(\x0b\x32\x30.google.cloud.scheduler.v1beta1.AppEngineRouting\x12\x14\n\x0crelative_uri\x18\x03 \x01(\t\x12Q\n\x07headers\x18\x04 \x03(\x0b\x32@.google.cloud.scheduler.v1beta1.AppEngineHttpTarget.HeadersEntry\x12\x0c\n\x04\x62ody\x18\x05 \x01(\x0c\x1a.\n\x0cHeadersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xd7\x01\n\x0cPubsubTarget\x12\x34\n\ntopic_name\x18\x01 \x01(\tB \xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic\x12\x0c\n\x04\x64\x61ta\x18\x03 \x01(\x0c\x12P\n\nattributes\x18\x04 \x03(\x0b\x32<.google.cloud.scheduler.v1beta1.PubsubTarget.AttributesEntry\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"T\n\x10\x41ppEngineRouting\x12\x0f\n\x07service\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\x12\x10\n\x08instance\x18\x03 \x01(\t\x12\x0c\n\x04host\x18\x04 \x01(\t":\n\nOAuthToken\x12\x1d\n\x15service_account_email\x18\x01 \x01(\t\x12\r\n\x05scope\x18\x02 \x01(\t"<\n\tOidcToken\x12\x1d\n\x15service_account_email\x18\x01 \x01(\t\x12\x10\n\x08\x61udience\x18\x02 
\x01(\t*s\n\nHttpMethod\x12\x1b\n\x17HTTP_METHOD_UNSPECIFIED\x10\x00\x12\x08\n\x04POST\x10\x01\x12\x07\n\x03GET\x10\x02\x12\x08\n\x04HEAD\x10\x03\x12\x07\n\x03PUT\x10\x04\x12\n\n\x06\x44\x45LETE\x10\x05\x12\t\n\x05PATCH\x10\x06\x12\x0b\n\x07OPTIONS\x10\x07\x42\xbf\x01\n"com.google.cloud.scheduler.v1beta1B\x0bTargetProtoP\x01ZGgoogle.golang.org/genproto/googleapis/cloud/scheduler/v1beta1;scheduler\xea\x41@\n\x1bpubsub.googleapis.com/Topic\x12!projects/{project}/topics/{topic}b\x06proto3' ), dependencies=[ + google_dot_api_dot_resource__pb2.DESCRIPTOR, google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_protobuf_dot_any__pb2.DESCRIPTOR, ], ) @@ -73,8 +73,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=1253, - serialized_end=1368, + serialized_start=1287, + serialized_end=1402, ) _sym_db.RegisterEnumDescriptor(_HTTPMETHOD) @@ -501,8 +501,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=994, - serialized_end=1043, + serialized_start=1028, + serialized_end=1077, ) _PUBSUBTARGET = _descriptor.Descriptor( @@ -527,7 +527,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\372A\035\n\033pubsub.googleapis.com/Topic"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -576,7 +576,7 @@ extension_ranges=[], oneofs=[], serialized_start=862, - serialized_end=1043, + serialized_end=1077, ) @@ -668,8 +668,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1045, - serialized_end=1129, + serialized_start=1079, + serialized_end=1163, ) @@ -725,8 +725,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1131, - serialized_end=1189, + serialized_start=1165, + serialized_end=1223, ) @@ -782,8 +782,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1191, - serialized_end=1251, + serialized_start=1225, + serialized_end=1285, ) _HTTPTARGET_HEADERSENTRY.containing_type = _HTTPTARGET @@ -848,7 +848,7 
@@ Attributes: uri: - Required. The full URI path that the request will be sent to. + Required. The full URI path that the request will be sent to. This string must begin with either "http://" or "https://". Some examples of valid values for [uri][google.cloud.scheduler.v1beta1.HttpTarget.uri] are: @@ -887,14 +887,16 @@ If specified, an `OAuth token `__ will be generated and attached as an ``Authorization`` header - in the HTTP request. This type of authorization should be - used when sending requests to a GCP endpoint. + in the HTTP request. This type of authorization should + generally only be used when calling Google APIs hosted on + \*.googleapis.com. oidc_token: If specified, an `OIDC `__ token will be generated and attached as an ``Authorization`` header in the HTTP request. - This type of authorization should be used when sending - requests to third party endpoints. + This type of authorization can be used for many scenarios, + including calling Cloud Run, or endpoints where you intend to + validate the token yourself. """, # @@protoc_insertion_point(class_scope:google.cloud.scheduler.v1beta1.HttpTarget) ), @@ -999,7 +1001,7 @@ Attributes: topic_name: - Required. The name of the Cloud Pub/Sub topic to which + Required. The name of the Cloud Pub/Sub topic to which messages will be published when a job is delivered. The topic name must be in the same format as required by PubSub's `PublishRequest.name `__. This - type of authorization should be used when sending requests to a GCP - endpoint. + type of authorization should generally only be used when calling Google + APIs hosted on \*.googleapis.com. Attributes: @@ -1150,8 +1152,9 @@ __module__="google.cloud.scheduler_v1beta1.proto.target_pb2", __doc__="""Contains information needed for generating an `OpenID Connect token `__. - This type of authorization should be used when sending requests to third - party endpoints. 
+ This type of authorization can be used for many scenarios, including + calling Cloud Run, or endpoints where you intend to validate the token + yourself. Attributes: @@ -1175,4 +1178,5 @@ _HTTPTARGET_HEADERSENTRY._options = None _APPENGINEHTTPTARGET_HEADERSENTRY._options = None _PUBSUBTARGET_ATTRIBUTESENTRY._options = None +_PUBSUBTARGET.fields_by_name["topic_name"]._options = None # @@protoc_insertion_point(module_scope) diff --git a/scheduler/synth.metadata b/scheduler/synth.metadata index 86c1219f85b7..797c82f3212b 100644 --- a/scheduler/synth.metadata +++ b/scheduler/synth.metadata @@ -1,26 +1,26 @@ { - "updateTime": "2019-08-06T12:36:15.641949Z", + "updateTime": "2019-10-23T12:31:17.538221Z", "sources": [ { "generator": { "name": "artman", - "version": "0.32.1", - "dockerImage": "googleapis/artman@sha256:a684d40ba9a4e15946f5f2ca6b4bd9fe301192f522e9de4fff622118775f309b" + "version": "0.40.2", + "dockerImage": "googleapis/artman@sha256:3b8f7d9b4c206843ce08053474f5c64ae4d388ff7d995e68b59fb65edf73eeb9" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "e699b0cba64ffddfae39633417180f1f65875896", - "internalRef": "261759677" + "sha": "0d0dc5172f16c9815a5eda6e99408fb96282f608", + "internalRef": "276178557" } }, { "template": { "name": "python_library", "origin": "synthtool.gcp", - "version": "2019.5.2" + "version": "2019.10.17" } } ], diff --git a/securitycenter/docs/conf.py b/securitycenter/docs/conf.py index ebb74d010e89..3e082a2d1a41 100644 --- a/securitycenter/docs/conf.py +++ b/securitycenter/docs/conf.py @@ -338,7 +338,7 @@ "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://2.python-requests.org/en/master/", None), + "requests": ("https://requests.kennethreitz.org/en/stable/", None), "fastavro": 
("https://fastavro.readthedocs.io/en/stable/", None), "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } diff --git a/securitycenter/google/cloud/securitycenter.py b/securitycenter/google/cloud/securitycenter.py index 7b5ed2b41912..e2b719caee36 100644 --- a/securitycenter/google/cloud/securitycenter.py +++ b/securitycenter/google/cloud/securitycenter.py @@ -22,4 +22,8 @@ from google.cloud.securitycenter_v1 import types -__all__ = ("enums", "types", "SecurityCenterClient") +__all__ = ( + "enums", + "types", + "SecurityCenterClient", +) diff --git a/securitycenter/google/cloud/securitycenter_v1/__init__.py b/securitycenter/google/cloud/securitycenter_v1/__init__.py index f8442f6bfc9b..090c03d10cca 100644 --- a/securitycenter/google/cloud/securitycenter_v1/__init__.py +++ b/securitycenter/google/cloud/securitycenter_v1/__init__.py @@ -27,4 +27,8 @@ class SecurityCenterClient(security_center_client.SecurityCenterClient): enums = enums -__all__ = ("enums", "types", "SecurityCenterClient") +__all__ = ( + "enums", + "types", + "SecurityCenterClient", +) diff --git a/securitycenter/google/cloud/securitycenter_v1/gapic/security_center_client.py b/securitycenter/google/cloud/securitycenter_v1/gapic/security_center_client.py index ccc7c2a93a53..eaf1e33ae381 100644 --- a/securitycenter/google/cloud/securitycenter_v1/gapic/security_center_client.py +++ b/securitycenter/google/cloud/securitycenter_v1/gapic/security_center_client.py @@ -56,7 +56,7 @@ _GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( - "google-cloud-securitycenter" + "google-cloud-securitycenter", ).version @@ -132,7 +132,7 @@ def finding_security_marks_path(cls, organization, source, finding): def organization_path(cls, organization): """Return a fully-qualified organization string.""" return google.api_core.path_template.expand( - "organizations/{organization}", organization=organization + "organizations/{organization}", organization=organization, ) @classmethod @@ -147,7 +147,7 @@ def 
organization_settings_path(cls, organization): def organization_sources_path(cls, organization): """Return a fully-qualified organization_sources string.""" return google.api_core.path_template.expand( - "organizations/{organization}/sources/-", organization=organization + "organizations/{organization}/sources/-", organization=organization, ) @classmethod @@ -246,12 +246,12 @@ def __init__( self.transport = transport else: self.transport = security_center_grpc_transport.SecurityCenterGrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials + address=api_endpoint, channel=channel, credentials=credentials, ) if client_info is None: client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION + gapic_version=_GAPIC_LIBRARY_VERSION, ) else: client_info.gapic_version = _GAPIC_LIBRARY_VERSION @@ -262,7 +262,7 @@ def __init__( # (Ordinarily, these are the defaults specified in the `*_config.py` # file next to this one.) self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME] + client_config["interfaces"][self._INTERFACE_NAME], ) # Save a dictionary of cached API call functions. 
@@ -334,7 +334,7 @@ def create_source( ) request = securitycenter_service_pb2.CreateSourceRequest( - parent=parent, source=source + parent=parent, source=source, ) if metadata is None: metadata = [] @@ -423,7 +423,7 @@ def create_finding( ) request = securitycenter_service_pb2.CreateFindingRequest( - parent=parent, finding_id=finding_id, finding=finding + parent=parent, finding_id=finding_id, finding=finding, ) if metadata is None: metadata = [] @@ -501,7 +501,7 @@ def get_iam_policy( ) request = iam_policy_pb2.GetIamPolicyRequest( - resource=resource, options=options_ + resource=resource, options=options_, ) if metadata is None: metadata = [] @@ -572,7 +572,7 @@ def get_organization_settings( client_info=self._client_info, ) - request = securitycenter_service_pb2.GetOrganizationSettingsRequest(name=name) + request = securitycenter_service_pb2.GetOrganizationSettingsRequest(name=name,) if metadata is None: metadata = [] metadata = list(metadata) @@ -642,7 +642,7 @@ def get_source( client_info=self._client_info, ) - request = securitycenter_service_pb2.GetSourceRequest(name=name) + request = securitycenter_service_pb2.GetSourceRequest(name=name,) if metadata is None: metadata = [] metadata = list(metadata) @@ -1592,7 +1592,7 @@ def list_sources( ) request = securitycenter_service_pb2.ListSourcesRequest( - parent=parent, page_size=page_size + parent=parent, page_size=page_size, ) if metadata is None: metadata = [] @@ -1688,7 +1688,7 @@ def run_asset_discovery( client_info=self._client_info, ) - request = securitycenter_service_pb2.RunAssetDiscoveryRequest(parent=parent) + request = securitycenter_service_pb2.RunAssetDiscoveryRequest(parent=parent,) if metadata is None: metadata = [] metadata = list(metadata) @@ -1780,7 +1780,7 @@ def set_finding_state( ) request = securitycenter_service_pb2.SetFindingStateRequest( - name=name, state=state, start_time=start_time + name=name, state=state, start_time=start_time, ) if metadata is None: metadata = [] @@ -1862,7 +1862,7 @@ 
def set_iam_policy( client_info=self._client_info, ) - request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy) + request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy,) if metadata is None: metadata = [] metadata = list(metadata) @@ -1941,7 +1941,7 @@ def test_iam_permissions( ) request = iam_policy_pb2.TestIamPermissionsRequest( - resource=resource, permissions=permissions + resource=resource, permissions=permissions, ) if metadata is None: metadata = [] @@ -2033,7 +2033,7 @@ def update_finding( ) request = securitycenter_service_pb2.UpdateFindingRequest( - finding=finding, update_mask=update_mask + finding=finding, update_mask=update_mask, ) if metadata is None: metadata = [] @@ -2117,7 +2117,7 @@ def update_organization_settings( ) request = securitycenter_service_pb2.UpdateOrganizationSettingsRequest( - organization_settings=organization_settings, update_mask=update_mask + organization_settings=organization_settings, update_mask=update_mask, ) if metadata is None: metadata = [] @@ -2201,7 +2201,7 @@ def update_source( ) request = securitycenter_service_pb2.UpdateSourceRequest( - source=source, update_mask=update_mask + source=source, update_mask=update_mask, ) if metadata is None: metadata = [] diff --git a/securitycenter/google/cloud/securitycenter_v1/gapic/transports/security_center_grpc_transport.py b/securitycenter/google/cloud/securitycenter_v1/gapic/transports/security_center_grpc_transport.py index f93386352719..1b6be666cee2 100644 --- a/securitycenter/google/cloud/securitycenter_v1/gapic/transports/security_center_grpc_transport.py +++ b/securitycenter/google/cloud/securitycenter_v1/gapic/transports/security_center_grpc_transport.py @@ -57,7 +57,7 @@ def __init__( # exception (channels come with credentials baked in already). if channel is not None and credentials is not None: raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive." 
+ "The `channel` and `credentials` arguments are mutually " "exclusive.", ) # Create the channel. @@ -78,7 +78,7 @@ def __init__( self._stubs = { "security_center_stub": securitycenter_service_pb2_grpc.SecurityCenterStub( channel - ) + ), } # Because this API includes a method that returns a diff --git a/securitycenter/google/cloud/securitycenter_v1/proto/asset_pb2.py b/securitycenter/google/cloud/securitycenter_v1/proto/asset_pb2.py index 68d334f060d7..ff176dabf050 100644 --- a/securitycenter/google/cloud/securitycenter_v1/proto/asset_pb2.py +++ b/securitycenter/google/cloud/securitycenter_v1/proto/asset_pb2.py @@ -176,7 +176,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], diff --git a/securitycenter/google/cloud/securitycenter_v1/proto/finding_pb2.py b/securitycenter/google/cloud/securitycenter_v1/proto/finding_pb2.py index 5b96096dee3b..f0f534134c40 100644 --- a/securitycenter/google/cloud/securitycenter_v1/proto/finding_pb2.py +++ b/securitycenter/google/cloud/securitycenter_v1/proto/finding_pb2.py @@ -315,8 +315,8 @@ ), ], extensions=[], - nested_types=[_FINDING_SOURCEPROPERTIESENTRY], - enum_types=[_FINDING_STATE], + nested_types=[_FINDING_SOURCEPROPERTIESENTRY,], + enum_types=[_FINDING_STATE,], serialized_options=None, is_extendable=False, syntax="proto3", diff --git a/securitycenter/google/cloud/securitycenter_v1/proto/organization_settings_pb2.py b/securitycenter/google/cloud/securitycenter_v1/proto/organization_settings_pb2.py index 8e9189fbb162..7056f7b07520 100644 --- a/securitycenter/google/cloud/securitycenter_v1/proto/organization_settings_pb2.py +++ b/securitycenter/google/cloud/securitycenter_v1/proto/organization_settings_pb2.py @@ -28,7 +28,7 @@ serialized_pb=_b( '\n@google/cloud/securitycenter_v1/proto/organization_settings.proto\x12\x1egoogle.cloud.securitycenter.v1\x1a\x1cgoogle/api/annotations.proto"\xaa\x03\n\x14OrganizationSettings\x12\x0c\n\x04name\x18\x01 
\x01(\t\x12\x1e\n\x16\x65nable_asset_discovery\x18\x02 \x01(\x08\x12i\n\x16\x61sset_discovery_config\x18\x03 \x01(\x0b\x32I.google.cloud.securitycenter.v1.OrganizationSettings.AssetDiscoveryConfig\x1a\xec\x01\n\x14\x41ssetDiscoveryConfig\x12\x13\n\x0bproject_ids\x18\x01 \x03(\t\x12o\n\x0einclusion_mode\x18\x02 \x01(\x0e\x32W.google.cloud.securitycenter.v1.OrganizationSettings.AssetDiscoveryConfig.InclusionMode"N\n\rInclusionMode\x12\x1e\n\x1aINCLUSION_MODE_UNSPECIFIED\x10\x00\x12\x10\n\x0cINCLUDE_ONLY\x10\x01\x12\x0b\n\x07\x45XCLUDE\x10\x02J\x04\x08\x04\x10\x05J\x04\x08\x05\x10\x06\x42\xda\x01\n"com.google.cloud.securitycenter.v1P\x01ZLgoogle.golang.org/genproto/googleapis/cloud/securitycenter/v1;securitycenter\xaa\x02\x1eGoogle.Cloud.SecurityCenter.V1\xca\x02\x1eGoogle\\Cloud\\SecurityCenter\\V1\xea\x02!Google::Cloud::SecurityCenter::V1b\x06proto3' ), - dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR], + dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,], ) @@ -106,7 +106,7 @@ ], extensions=[], nested_types=[], - enum_types=[_ORGANIZATIONSETTINGS_ASSETDISCOVERYCONFIG_INCLUSIONMODE], + enum_types=[_ORGANIZATIONSETTINGS_ASSETDISCOVERYCONFIG_INCLUSIONMODE,], serialized_options=None, is_extendable=False, syntax="proto3", @@ -179,7 +179,7 @@ ), ], extensions=[], - nested_types=[_ORGANIZATIONSETTINGS_ASSETDISCOVERYCONFIG], + nested_types=[_ORGANIZATIONSETTINGS_ASSETDISCOVERYCONFIG,], enum_types=[], serialized_options=None, is_extendable=False, diff --git a/securitycenter/google/cloud/securitycenter_v1/proto/run_asset_discovery_response_pb2.py b/securitycenter/google/cloud/securitycenter_v1/proto/run_asset_discovery_response_pb2.py index 447d1c4ad733..44504bf5d0f3 100644 --- a/securitycenter/google/cloud/securitycenter_v1/proto/run_asset_discovery_response_pb2.py +++ b/securitycenter/google/cloud/securitycenter_v1/proto/run_asset_discovery_response_pb2.py @@ -115,7 +115,7 @@ ], extensions=[], nested_types=[], - 
enum_types=[_RUNASSETDISCOVERYRESPONSE_STATE], + enum_types=[_RUNASSETDISCOVERYRESPONSE_STATE,], serialized_options=None, is_extendable=False, syntax="proto3", diff --git a/securitycenter/google/cloud/securitycenter_v1/proto/security_marks_pb2.py b/securitycenter/google/cloud/securitycenter_v1/proto/security_marks_pb2.py index ab891dada5a8..f06ae1bea6b2 100644 --- a/securitycenter/google/cloud/securitycenter_v1/proto/security_marks_pb2.py +++ b/securitycenter/google/cloud/securitycenter_v1/proto/security_marks_pb2.py @@ -28,7 +28,7 @@ serialized_pb=_b( '\n9google/cloud/securitycenter_v1/proto/security_marks.proto\x12\x1egoogle.cloud.securitycenter.v1\x1a\x1cgoogle/api/annotations.proto"\x94\x01\n\rSecurityMarks\x12\x0c\n\x04name\x18\x01 \x01(\t\x12G\n\x05marks\x18\x02 \x03(\x0b\x32\x38.google.cloud.securitycenter.v1.SecurityMarks.MarksEntry\x1a,\n\nMarksEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\xda\x01\n"com.google.cloud.securitycenter.v1P\x01ZLgoogle.golang.org/genproto/googleapis/cloud/securitycenter/v1;securitycenter\xaa\x02\x1eGoogle.Cloud.SecurityCenter.V1\xca\x02\x1eGoogle\\Cloud\\SecurityCenter\\V1\xea\x02!Google::Cloud::SecurityCenter::V1b\x06proto3' ), - dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR], + dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,], ) @@ -133,7 +133,7 @@ ), ], extensions=[], - nested_types=[_SECURITYMARKS_MARKSENTRY], + nested_types=[_SECURITYMARKS_MARKSENTRY,], enum_types=[], serialized_options=None, is_extendable=False, diff --git a/securitycenter/google/cloud/securitycenter_v1/proto/securitycenter_service_pb2.py b/securitycenter/google/cloud/securitycenter_v1/proto/securitycenter_service_pb2.py index 87d92ccf653a..ec734191b77b 100644 --- a/securitycenter/google/cloud/securitycenter_v1/proto/securitycenter_service_pb2.py +++ b/securitycenter/google/cloud/securitycenter_v1/proto/securitycenter_service_pb2.py @@ -288,7 +288,7 @@ extension_scope=None, 
serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -327,7 +327,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -923,7 +923,7 @@ ), ], extensions=[], - nested_types=[_GROUPRESULT_PROPERTIESENTRY], + nested_types=[_GROUPRESULT_PROPERTIESENTRY,], enum_types=[], serialized_options=None, is_extendable=False, @@ -1278,7 +1278,7 @@ ], extensions=[], nested_types=[], - enum_types=[_LISTASSETSRESPONSE_LISTASSETSRESULT_STATECHANGE], + enum_types=[_LISTASSETSRESPONSE_LISTASSETSRESULT_STATECHANGE,], serialized_options=None, is_extendable=False, syntax="proto3", @@ -1369,7 +1369,7 @@ ), ], extensions=[], - nested_types=[_LISTASSETSRESPONSE_LISTASSETSRESULT], + nested_types=[_LISTASSETSRESPONSE_LISTASSETSRESULT,], enum_types=[], serialized_options=None, is_extendable=False, @@ -1592,7 +1592,7 @@ ], extensions=[], nested_types=[], - enum_types=[_LISTFINDINGSRESPONSE_LISTFINDINGSRESULT_STATECHANGE], + enum_types=[_LISTFINDINGSRESPONSE_LISTFINDINGSRESULT_STATECHANGE,], serialized_options=None, is_extendable=False, syntax="proto3", @@ -1683,7 +1683,7 @@ ), ], extensions=[], - nested_types=[_LISTFINDINGSRESPONSE_LISTFINDINGSRESULT], + nested_types=[_LISTFINDINGSRESPONSE_LISTFINDINGSRESULT,], enum_types=[], serialized_options=None, is_extendable=False, @@ -1794,7 +1794,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], diff --git a/securitycenter/google/cloud/securitycenter_v1/proto/source_pb2.py b/securitycenter/google/cloud/securitycenter_v1/proto/source_pb2.py index b9e7fe1d5034..0e0363afce7e 100644 --- a/securitycenter/google/cloud/securitycenter_v1/proto/source_pb2.py +++ b/securitycenter/google/cloud/securitycenter_v1/proto/source_pb2.py @@ -28,7 +28,7 @@ serialized_pb=_b( 
'\n1google/cloud/securitycenter_v1/proto/source.proto\x12\x1egoogle.cloud.securitycenter.v1\x1a\x1cgoogle/api/annotations.proto"G\n\x06Source\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\tJ\x04\x08\x04\x10\x05\x42\xda\x01\n"com.google.cloud.securitycenter.v1P\x01ZLgoogle.golang.org/genproto/googleapis/cloud/securitycenter/v1;securitycenter\xaa\x02\x1eGoogle.Cloud.SecurityCenter.V1\xca\x02\x1eGoogle\\Cloud\\SecurityCenter\\V1\xea\x02!Google::Cloud::SecurityCenter::V1b\x06proto3' ), - dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR], + dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,], ) diff --git a/securitycenter/google/cloud/securitycenter_v1beta1/__init__.py b/securitycenter/google/cloud/securitycenter_v1beta1/__init__.py index f15d206643a9..15ad4154ae86 100644 --- a/securitycenter/google/cloud/securitycenter_v1beta1/__init__.py +++ b/securitycenter/google/cloud/securitycenter_v1beta1/__init__.py @@ -27,4 +27,8 @@ class SecurityCenterClient(security_center_client.SecurityCenterClient): enums = enums -__all__ = ("enums", "types", "SecurityCenterClient") +__all__ = ( + "enums", + "types", + "SecurityCenterClient", +) diff --git a/securitycenter/google/cloud/securitycenter_v1beta1/gapic/security_center_client.py b/securitycenter/google/cloud/securitycenter_v1beta1/gapic/security_center_client.py index 7b26dffaf78a..1e0535b01237 100644 --- a/securitycenter/google/cloud/securitycenter_v1beta1/gapic/security_center_client.py +++ b/securitycenter/google/cloud/securitycenter_v1beta1/gapic/security_center_client.py @@ -55,7 +55,7 @@ _GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( - "google-cloud-securitycenter" + "google-cloud-securitycenter", ).version @@ -122,7 +122,7 @@ def finding_security_marks_path(cls, organization, source, finding): def organization_path(cls, organization): """Return a fully-qualified organization string.""" return 
google.api_core.path_template.expand( - "organizations/{organization}", organization=organization + "organizations/{organization}", organization=organization, ) @classmethod @@ -229,12 +229,12 @@ def __init__( self.transport = transport else: self.transport = security_center_grpc_transport.SecurityCenterGrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials + address=api_endpoint, channel=channel, credentials=credentials, ) if client_info is None: client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION + gapic_version=_GAPIC_LIBRARY_VERSION, ) else: client_info.gapic_version = _GAPIC_LIBRARY_VERSION @@ -245,7 +245,7 @@ def __init__( # (Ordinarily, these are the defaults specified in the `*_config.py` # file next to this one.) self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME] + client_config["interfaces"][self._INTERFACE_NAME], ) # Save a dictionary of cached API call functions. 
@@ -317,7 +317,7 @@ def create_source( ) request = securitycenter_service_pb2.CreateSourceRequest( - parent=parent, source=source + parent=parent, source=source, ) if metadata is None: metadata = [] @@ -406,7 +406,7 @@ def create_finding( ) request = securitycenter_service_pb2.CreateFindingRequest( - parent=parent, finding_id=finding_id, finding=finding + parent=parent, finding_id=finding_id, finding=finding, ) if metadata is None: metadata = [] @@ -484,7 +484,7 @@ def get_iam_policy( ) request = iam_policy_pb2.GetIamPolicyRequest( - resource=resource, options=options_ + resource=resource, options=options_, ) if metadata is None: metadata = [] @@ -555,7 +555,7 @@ def get_organization_settings( client_info=self._client_info, ) - request = securitycenter_service_pb2.GetOrganizationSettingsRequest(name=name) + request = securitycenter_service_pb2.GetOrganizationSettingsRequest(name=name,) if metadata is None: metadata = [] metadata = list(metadata) @@ -625,7 +625,7 @@ def get_source( client_info=self._client_info, ) - request = securitycenter_service_pb2.GetSourceRequest(name=name) + request = securitycenter_service_pb2.GetSourceRequest(name=name,) if metadata is None: metadata = [] metadata = list(metadata) @@ -1428,7 +1428,7 @@ def list_sources( ) request = securitycenter_service_pb2.ListSourcesRequest( - parent=parent, page_size=page_size + parent=parent, page_size=page_size, ) if metadata is None: metadata = [] @@ -1524,7 +1524,7 @@ def run_asset_discovery( client_info=self._client_info, ) - request = securitycenter_service_pb2.RunAssetDiscoveryRequest(parent=parent) + request = securitycenter_service_pb2.RunAssetDiscoveryRequest(parent=parent,) if metadata is None: metadata = [] metadata = list(metadata) @@ -1616,7 +1616,7 @@ def set_finding_state( ) request = securitycenter_service_pb2.SetFindingStateRequest( - name=name, state=state, start_time=start_time + name=name, state=state, start_time=start_time, ) if metadata is None: metadata = [] @@ -1698,7 +1698,7 @@ 
def set_iam_policy( client_info=self._client_info, ) - request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy) + request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy,) if metadata is None: metadata = [] metadata = list(metadata) @@ -1777,7 +1777,7 @@ def test_iam_permissions( ) request = iam_policy_pb2.TestIamPermissionsRequest( - resource=resource, permissions=permissions + resource=resource, permissions=permissions, ) if metadata is None: metadata = [] @@ -1864,7 +1864,7 @@ def update_finding( ) request = securitycenter_service_pb2.UpdateFindingRequest( - finding=finding, update_mask=update_mask + finding=finding, update_mask=update_mask, ) if metadata is None: metadata = [] @@ -1946,7 +1946,7 @@ def update_organization_settings( ) request = securitycenter_service_pb2.UpdateOrganizationSettingsRequest( - organization_settings=organization_settings, update_mask=update_mask + organization_settings=organization_settings, update_mask=update_mask, ) if metadata is None: metadata = [] @@ -2028,7 +2028,7 @@ def update_source( ) request = securitycenter_service_pb2.UpdateSourceRequest( - source=source, update_mask=update_mask + source=source, update_mask=update_mask, ) if metadata is None: metadata = [] diff --git a/securitycenter/google/cloud/securitycenter_v1beta1/gapic/transports/security_center_grpc_transport.py b/securitycenter/google/cloud/securitycenter_v1beta1/gapic/transports/security_center_grpc_transport.py index 46419728cc2d..a7ea75051156 100644 --- a/securitycenter/google/cloud/securitycenter_v1beta1/gapic/transports/security_center_grpc_transport.py +++ b/securitycenter/google/cloud/securitycenter_v1beta1/gapic/transports/security_center_grpc_transport.py @@ -57,7 +57,7 @@ def __init__( # exception (channels come with credentials baked in already). if channel is not None and credentials is not None: raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive." 
+ "The `channel` and `credentials` arguments are mutually " "exclusive.", ) # Create the channel. @@ -78,7 +78,7 @@ def __init__( self._stubs = { "security_center_stub": securitycenter_service_pb2_grpc.SecurityCenterStub( channel - ) + ), } # Because this API includes a method that returns a diff --git a/securitycenter/google/cloud/securitycenter_v1beta1/proto/asset_pb2.py b/securitycenter/google/cloud/securitycenter_v1beta1/proto/asset_pb2.py index fc37a148ba90..82106ac488af 100644 --- a/securitycenter/google/cloud/securitycenter_v1beta1/proto/asset_pb2.py +++ b/securitycenter/google/cloud/securitycenter_v1beta1/proto/asset_pb2.py @@ -325,7 +325,7 @@ ), ], extensions=[], - nested_types=[_ASSET_SECURITYCENTERPROPERTIES, _ASSET_RESOURCEPROPERTIESENTRY], + nested_types=[_ASSET_SECURITYCENTERPROPERTIES, _ASSET_RESOURCEPROPERTIESENTRY,], enum_types=[], serialized_options=None, is_extendable=False, diff --git a/securitycenter/google/cloud/securitycenter_v1beta1/proto/finding_pb2.py b/securitycenter/google/cloud/securitycenter_v1beta1/proto/finding_pb2.py index a85a20232890..0415351d44c6 100644 --- a/securitycenter/google/cloud/securitycenter_v1beta1/proto/finding_pb2.py +++ b/securitycenter/google/cloud/securitycenter_v1beta1/proto/finding_pb2.py @@ -315,8 +315,8 @@ ), ], extensions=[], - nested_types=[_FINDING_SOURCEPROPERTIESENTRY], - enum_types=[_FINDING_STATE], + nested_types=[_FINDING_SOURCEPROPERTIESENTRY,], + enum_types=[_FINDING_STATE,], serialized_options=None, is_extendable=False, syntax="proto3", diff --git a/securitycenter/google/cloud/securitycenter_v1beta1/proto/organization_settings_pb2.py b/securitycenter/google/cloud/securitycenter_v1beta1/proto/organization_settings_pb2.py index e7c1588af71e..d8d1b3704faf 100644 --- a/securitycenter/google/cloud/securitycenter_v1beta1/proto/organization_settings_pb2.py +++ b/securitycenter/google/cloud/securitycenter_v1beta1/proto/organization_settings_pb2.py @@ -28,7 +28,7 @@ serialized_pb=_b( 
'\nEgoogle/cloud/securitycenter_v1beta1/proto/organization_settings.proto\x12#google.cloud.securitycenter.v1beta1\x1a\x1cgoogle/api/annotations.proto"\xa8\x03\n\x14OrganizationSettings\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x1e\n\x16\x65nable_asset_discovery\x18\x02 \x01(\x08\x12n\n\x16\x61sset_discovery_config\x18\x03 \x01(\x0b\x32N.google.cloud.securitycenter.v1beta1.OrganizationSettings.AssetDiscoveryConfig\x1a\xf1\x01\n\x14\x41ssetDiscoveryConfig\x12\x13\n\x0bproject_ids\x18\x01 \x03(\t\x12t\n\x0einclusion_mode\x18\x02 \x01(\x0e\x32\\.google.cloud.securitycenter.v1beta1.OrganizationSettings.AssetDiscoveryConfig.InclusionMode"N\n\rInclusionMode\x12\x1e\n\x1aINCLUSION_MODE_UNSPECIFIED\x10\x00\x12\x10\n\x0cINCLUDE_ONLY\x10\x01\x12\x0b\n\x07\x45XCLUDE\x10\x02\x42~\n\'com.google.cloud.securitycenter.v1beta1P\x01ZQgoogle.golang.org/genproto/googleapis/cloud/securitycenter/v1beta1;securitycenterb\x06proto3' ), - dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR], + dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,], ) @@ -106,7 +106,7 @@ ], extensions=[], nested_types=[], - enum_types=[_ORGANIZATIONSETTINGS_ASSETDISCOVERYCONFIG_INCLUSIONMODE], + enum_types=[_ORGANIZATIONSETTINGS_ASSETDISCOVERYCONFIG_INCLUSIONMODE,], serialized_options=None, is_extendable=False, syntax="proto3", @@ -179,7 +179,7 @@ ), ], extensions=[], - nested_types=[_ORGANIZATIONSETTINGS_ASSETDISCOVERYCONFIG], + nested_types=[_ORGANIZATIONSETTINGS_ASSETDISCOVERYCONFIG,], enum_types=[], serialized_options=None, is_extendable=False, diff --git a/securitycenter/google/cloud/securitycenter_v1beta1/proto/security_marks_pb2.py b/securitycenter/google/cloud/securitycenter_v1beta1/proto/security_marks_pb2.py index dcee636baff8..8487bd9ae00d 100644 --- a/securitycenter/google/cloud/securitycenter_v1beta1/proto/security_marks_pb2.py +++ b/securitycenter/google/cloud/securitycenter_v1beta1/proto/security_marks_pb2.py @@ -28,7 +28,7 @@ serialized_pb=_b( 
"\n>google/cloud/securitycenter_v1beta1/proto/security_marks.proto\x12#google.cloud.securitycenter.v1beta1\x1a\x1cgoogle/api/annotations.proto\"\x99\x01\n\rSecurityMarks\x12\x0c\n\x04name\x18\x01 \x01(\t\x12L\n\x05marks\x18\x02 \x03(\x0b\x32=.google.cloud.securitycenter.v1beta1.SecurityMarks.MarksEntry\x1a,\n\nMarksEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42~\n'com.google.cloud.securitycenter.v1beta1P\x01ZQgoogle.golang.org/genproto/googleapis/cloud/securitycenter/v1beta1;securitycenterb\x06proto3" ), - dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR], + dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,], ) @@ -133,7 +133,7 @@ ), ], extensions=[], - nested_types=[_SECURITYMARKS_MARKSENTRY], + nested_types=[_SECURITYMARKS_MARKSENTRY,], enum_types=[], serialized_options=None, is_extendable=False, diff --git a/securitycenter/google/cloud/securitycenter_v1beta1/proto/securitycenter_service_pb2.py b/securitycenter/google/cloud/securitycenter_v1beta1/proto/securitycenter_service_pb2.py index 42d309b63cdb..804cae1e5b39 100644 --- a/securitycenter/google/cloud/securitycenter_v1beta1/proto/securitycenter_service_pb2.py +++ b/securitycenter/google/cloud/securitycenter_v1beta1/proto/securitycenter_service_pb2.py @@ -262,7 +262,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -301,7 +301,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -843,7 +843,7 @@ ), ], extensions=[], - nested_types=[_GROUPRESULT_PROPERTIESENTRY], + nested_types=[_GROUPRESULT_PROPERTIESENTRY,], enum_types=[], serialized_options=None, is_extendable=False, @@ -1198,7 +1198,7 @@ ], extensions=[], nested_types=[], - enum_types=[_LISTASSETSRESPONSE_LISTASSETSRESULT_STATE], + enum_types=[_LISTASSETSRESPONSE_LISTASSETSRESULT_STATE,], serialized_options=None, is_extendable=False, syntax="proto3", @@ -1289,7 
+1289,7 @@ ), ], extensions=[], - nested_types=[_LISTASSETSRESPONSE_LISTASSETSRESULT], + nested_types=[_LISTASSETSRESPONSE_LISTASSETSRESULT,], enum_types=[], serialized_options=None, is_extendable=False, @@ -1640,7 +1640,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], diff --git a/securitycenter/google/cloud/securitycenter_v1beta1/proto/source_pb2.py b/securitycenter/google/cloud/securitycenter_v1beta1/proto/source_pb2.py index 0fa2b9c2771d..5b155e82bf61 100644 --- a/securitycenter/google/cloud/securitycenter_v1beta1/proto/source_pb2.py +++ b/securitycenter/google/cloud/securitycenter_v1beta1/proto/source_pb2.py @@ -28,7 +28,7 @@ serialized_pb=_b( "\n6google/cloud/securitycenter_v1beta1/proto/source.proto\x12#google.cloud.securitycenter.v1beta1\x1a\x1cgoogle/api/annotations.proto\"A\n\x06Source\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\tB~\n'com.google.cloud.securitycenter.v1beta1P\x01ZQgoogle.golang.org/genproto/googleapis/cloud/securitycenter/v1beta1;securitycenterb\x06proto3" ), - dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR], + dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,], ) diff --git a/securitycenter/synth.metadata b/securitycenter/synth.metadata index 9530dd07871f..139d6ecd64ba 100644 --- a/securitycenter/synth.metadata +++ b/securitycenter/synth.metadata @@ -1,26 +1,26 @@ { - "updateTime": "2019-08-06T12:37:26.268094Z", + "updateTime": "2019-10-29T12:35:55.798976Z", "sources": [ { "generator": { "name": "artman", - "version": "0.32.1", - "dockerImage": "googleapis/artman@sha256:a684d40ba9a4e15946f5f2ca6b4bd9fe301192f522e9de4fff622118775f309b" + "version": "0.40.3", + "dockerImage": "googleapis/artman@sha256:c805f50525f5f557886c94ab76f56eaa09cb1da58c3ee95111fd34259376621a" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": 
"e699b0cba64ffddfae39633417180f1f65875896", - "internalRef": "261759677" + "sha": "532773acbed8d09451dafb3d403ab1823e6a6e1e", + "internalRef": "277177415" } }, { "template": { "name": "python_library", "origin": "synthtool.gcp", - "version": "2019.5.2" + "version": "2019.10.17" } } ], diff --git a/spanner/CHANGELOG.md b/spanner/CHANGELOG.md index d217c95b3be7..97593703a647 100644 --- a/spanner/CHANGELOG.md +++ b/spanner/CHANGELOG.md @@ -4,6 +4,47 @@ [1]: https://pypi.org/project/google-cloud-spanner/#history +## 1.12.0 + +10-23-2019 19:09 PDT + + +### Implementation Changes +- Add `batch_create_session` calls to session pools. ([#9488](https://github.com/googleapis/google-cloud-python/pull/9488)) + +### New Features +- Add `client_options` to client constructor. ([#9151](https://github.com/googleapis/google-cloud-python/pull/9151)) + +### Internal / Testing Changes +- Harden 'test_reload_instance' systest against eventual consistency failures. ([#9394](https://github.com/googleapis/google-cloud-python/pull/9394)) +- Harden 'test_transaction_batch_update_w_syntax_error' systest. ([#9395](https://github.com/googleapis/google-cloud-python/pull/9395)) +- Propagate errors from 'Transaction.batch_update' in systest. ([#9393](https://github.com/googleapis/google-cloud-python/pull/9393)) + +## 1.11.0 + +10-15-2019 06:55 PDT + + +### Implementation Changes +- Adjust gRPC timeouts (via synth). ([#9330](https://github.com/googleapis/google-cloud-python/pull/9330)) +- Make `session_count` optional for `SpannerClient.batch_create_sessions` (via synth). ([#9280](https://github.com/googleapis/google-cloud-python/pull/9280)) +- Remove send / receive message size limit, update docstrings (via synth). ([#8968](https://github.com/googleapis/google-cloud-python/pull/8968)) + +### New Features +- Add `batch_create_sessions` method to generated client (via synth). 
([#9087](https://github.com/googleapis/google-cloud-python/pull/9087)) + +### Dependencies +- Pin 'google-cloud-core >= 1.0.3, < 2.0.0dev'. ([#9445](https://github.com/googleapis/google-cloud-python/pull/9445)) + +### Documentation +- Remove references to old authentication credentials in docs. ([#9456](https://github.com/googleapis/google-cloud-python/pull/9456)) +- Fix intersphinx reference to requests. ([#9294](https://github.com/googleapis/google-cloud-python/pull/9294)) +- Fix `run_in_transaction` return value docs. ([#9264](https://github.com/googleapis/google-cloud-python/pull/9264)) +- Remove CI for gh-pages, use googleapis.dev for `api_core` refs. ([#9085](https://github.com/googleapis/google-cloud-python/pull/9085)) +- Remove compatability badges from READMEs. ([#9035](https://github.com/googleapis/google-cloud-python/pull/9035)) +- Add DML insert and update examples to README. ([#8698](https://github.com/googleapis/google-cloud-python/pull/8698)) +- Update intersphinx mapping for requests. 
([#8805](https://github.com/googleapis/google-cloud-python/pull/8805)) + ## 1.10.0 07-24-2019 17:32 PDT diff --git a/spanner/docs/conf.py b/spanner/docs/conf.py index 320b3c2fc87f..a16cb3fe8851 100644 --- a/spanner/docs/conf.py +++ b/spanner/docs/conf.py @@ -339,7 +339,7 @@ "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://2.python-requests.org/en/master/", None), + "requests": ("https://requests.kennethreitz.org/en/stable/", None), "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } diff --git a/spanner/google/cloud/spanner_v1/client.py b/spanner/google/cloud/spanner_v1/client.py index a6f3bd25f5e6..b35bf19f0796 100644 --- a/spanner/google/cloud/spanner_v1/client.py +++ b/spanner/google/cloud/spanner_v1/client.py @@ -93,11 +93,12 @@ class Client(ClientWithProject): attempt to determine from the environment. :type credentials: - :class:`OAuth2Credentials ` or + :class:`Credentials ` or :data:`NoneType ` - :param credentials: (Optional) The OAuth2 Credentials to use for this - client. If not provided, defaults to the Google - Application Default Credentials. + :param credentials: (Optional) The authorization credentials to attach to requests. + These credentials identify this application to the service. + If none are specified, the client will attempt to ascertain + the credentials from the environment. :type client_info: :class:`google.api_core.gapic_v1.client_info.ClientInfo` :param client_info: @@ -110,6 +111,10 @@ class Client(ClientWithProject): :param user_agent: (Deprecated) The user agent to be used with API request. Not used. 
+ :type client_options: :class:`~google.api_core.client_options.ClientOptions` + or :class:`dict` + :param client_options: (Optional) Client options used to set user options + on the client. API Endpoint should be set through client_options. :raises: :class:`ValueError ` if both ``read_only`` and ``admin`` are :data:`True` @@ -124,7 +129,12 @@ class Client(ClientWithProject): """The scopes required for Google Cloud Spanner.""" def __init__( - self, project=None, credentials=None, client_info=_CLIENT_INFO, user_agent=None + self, + project=None, + credentials=None, + client_info=_CLIENT_INFO, + user_agent=None, + client_options=None, ): # NOTE: This API has no use for the _http argument, but sending it # will have no impact since the _http() @property only lazily @@ -133,6 +143,7 @@ def __init__( project=project, credentials=credentials, _http=None ) self._client_info = client_info + self._client_options = client_options if user_agent is not None: warnings.warn(_USER_AGENT_DEPRECATED, DeprecationWarning, stacklevel=2) @@ -143,7 +154,7 @@ def credentials(self): """Getter for client's credentials. :rtype: - :class:`OAuth2Credentials ` + :class:`Credentials ` :returns: The credentials stored on the client. 
""" return self._credentials @@ -172,7 +183,9 @@ def instance_admin_api(self): """Helper for session-related API calls.""" if self._instance_admin_api is None: self._instance_admin_api = InstanceAdminClient( - credentials=self.credentials, client_info=self._client_info + credentials=self.credentials, + client_info=self._client_info, + client_options=self._client_options, ) return self._instance_admin_api @@ -181,7 +194,9 @@ def database_admin_api(self): """Helper for session-related API calls.""" if self._database_admin_api is None: self._database_admin_api = DatabaseAdminClient( - credentials=self.credentials, client_info=self._client_info + credentials=self.credentials, + client_info=self._client_info, + client_options=self._client_options, ) return self._database_admin_api diff --git a/spanner/google/cloud/spanner_v1/database.py b/spanner/google/cloud/spanner_v1/database.py index 77efca155a98..f561ecd4fa9e 100644 --- a/spanner/google/cloud/spanner_v1/database.py +++ b/spanner/google/cloud/spanner_v1/database.py @@ -177,8 +177,11 @@ def spanner_api(self): if isinstance(credentials, google.auth.credentials.Scoped): credentials = credentials.with_scopes((SPANNER_DATA_SCOPE,)) client_info = self._instance._client._client_info + client_options = self._instance._client._client_options self._spanner_api = SpannerClient( - credentials=credentials, client_info=client_info + credentials=credentials, + client_info=client_info, + client_options=client_options, ) return self._spanner_api @@ -416,12 +419,16 @@ def run_in_transaction(self, func, *args, **kw): :param args: additional positional arguments to be passed to ``func``. :type kw: dict - :param kw: optional keyword arguments to be passed to ``func``. + :param kw: (Optional) keyword arguments to be passed to ``func``. If passed, "timeout_secs" will be removed and used to - override the default timeout. + override the default retry timeout which defines maximum timestamp + to continue retrying the transaction. 
- :rtype: :class:`datetime.datetime` - :returns: timestamp of committed transaction + :rtype: Any + :returns: The return value of ``func``. + + :raises Exception: + reraises any non-ABORT execptions raised by ``func``. """ # Sanity check: Is there a transaction already running? # If there is, then raise a red flag. Otherwise, mark that this one diff --git a/spanner/google/cloud/spanner_v1/gapic/spanner_client.py b/spanner/google/cloud/spanner_v1/gapic/spanner_client.py index 41490b0a2869..f49481d1200b 100644 --- a/spanner/google/cloud/spanner_v1/gapic/spanner_client.py +++ b/spanner/google/cloud/spanner_v1/gapic/spanner_client.py @@ -310,8 +310,8 @@ def create_session( def batch_create_sessions( self, database, + session_count, session_template=None, - session_count=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, @@ -329,18 +329,21 @@ def batch_create_sessions( >>> >>> database = client.database_path('[PROJECT]', '[INSTANCE]', '[DATABASE]') >>> - >>> response = client.batch_create_sessions(database) + >>> # TODO: Initialize `session_count`: + >>> session_count = 0 + >>> + >>> response = client.batch_create_sessions(database, session_count) Args: database (str): Required. The database in which the new sessions are created. - session_template (Union[dict, ~google.cloud.spanner_v1.types.Session]): Parameters to be applied to each created session. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.spanner_v1.types.Session` session_count (int): Required. The number of sessions to be created in this batch call. The API may return fewer than the requested number of sessions. If a specific number of sessions are desired, the client can make additional calls to BatchCreateSessions (adjusting ``session_count`` as necessary). + session_template (Union[dict, ~google.cloud.spanner_v1.types.Session]): Parameters to be applied to each created session. 
+ + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.spanner_v1.types.Session` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. @@ -373,8 +376,8 @@ def batch_create_sessions( request = spanner_pb2.BatchCreateSessionsRequest( database=database, - session_template=session_template, session_count=session_count, + session_template=session_template, ) if metadata is None: metadata = [] diff --git a/spanner/google/cloud/spanner_v1/gapic/spanner_client_config.py b/spanner/google/cloud/spanner_v1/gapic/spanner_client_config.py index 0b4722fd8d06..333f72afe28c 100644 --- a/spanner/google/cloud/spanner_v1/gapic/spanner_client_config.py +++ b/spanner/google/cloud/spanner_v1/gapic/spanner_client_config.py @@ -11,19 +11,19 @@ "initial_retry_delay_millis": 250, "retry_delay_multiplier": 1.3, "max_retry_delay_millis": 32000, - "initial_rpc_timeout_millis": 60000, + "initial_rpc_timeout_millis": 360000, "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 600000, + "max_rpc_timeout_millis": 360000, + "total_timeout_millis": 3600000, }, "streaming": { "initial_retry_delay_millis": 250, "retry_delay_multiplier": 1.3, "max_retry_delay_millis": 32000, - "initial_rpc_timeout_millis": 120000, + "initial_rpc_timeout_millis": 360000, "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 120000, - "total_timeout_millis": 1200000, + "max_rpc_timeout_millis": 360000, + "total_timeout_millis": 3600000, }, "long_running": { "initial_retry_delay_millis": 250, diff --git a/spanner/google/cloud/spanner_v1/pool.py b/spanner/google/cloud/spanner_v1/pool.py index 823681fbc864..4ef5aee9baab 100644 --- a/spanner/google/cloud/spanner_v1/pool.py +++ b/spanner/google/cloud/spanner_v1/pool.py @@ -17,9 +17,9 @@ import datetime from six.moves import queue -from six.moves import xrange from 
google.cloud.exceptions import NotFound +from google.cloud.spanner_v1._helpers import _metadata_with_prefix _NOW = datetime.datetime.utcnow # unit tests may replace @@ -166,11 +166,20 @@ def bind(self, database): when needed. """ self._database = database + api = database.spanner_api + metadata = _metadata_with_prefix(database.name) while not self._sessions.full(): - session = self._new_session() - session.create() - self._sessions.put(session) + resp = api.batch_create_sessions( + database.name, + self.size - self._sessions.qsize(), + timeout=self.default_timeout, + metadata=metadata, + ) + for session_pb in resp.session: + session = self._new_session() + session._session_id = session_pb.name.split("/")[-1] + self._sessions.put(session) def get(self, timeout=None): # pylint: disable=arguments-differ """Check a session out from the pool. @@ -350,11 +359,22 @@ def bind(self, database): when needed. """ self._database = database - - for _ in xrange(self.size): - session = self._new_session() - session.create() - self.put(session) + api = database.spanner_api + metadata = _metadata_with_prefix(database.name) + created_session_count = 0 + + while created_session_count < self.size: + resp = api.batch_create_sessions( + database.name, + self.size - created_session_count, + timeout=self.default_timeout, + metadata=metadata, + ) + for session_pb in resp.session: + session = self._new_session() + session._session_id = session_pb.name.split("/")[-1] + self.put(session) + created_session_count += len(resp.session) def get(self, timeout=None): # pylint: disable=arguments-differ """Check a session out from the pool. 
diff --git a/spanner/google/cloud/spanner_v1/session.py b/spanner/google/cloud/spanner_v1/session.py index 4685c8b80137..f8e7e88d9731 100644 --- a/spanner/google/cloud/spanner_v1/session.py +++ b/spanner/google/cloud/spanner_v1/session.py @@ -273,9 +273,10 @@ def run_in_transaction(self, func, *args, **kw): :param args: additional positional arguments to be passed to ``func``. :type kw: dict - :param kw: optional keyword arguments to be passed to ``func``. + :param kw: (Optional) keyword arguments to be passed to ``func``. If passed, "timeout_secs" will be removed and used to - override the default timeout. + override the default retry timeout which defines maximum timestamp + to continue retrying the transaction. :rtype: Any :returns: The return value of ``func``. diff --git a/spanner/setup.py b/spanner/setup.py index 5884bf628b7c..c8c47ef4a8d8 100644 --- a/spanner/setup.py +++ b/spanner/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-spanner" description = "Cloud Spanner API client library" -version = "1.10.0" +version = "1.12.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' @@ -30,7 +30,7 @@ release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "google-api-core[grpc, grpcgcp] >= 1.14.0, < 2.0.0dev", - "google-cloud-core >= 1.0.0, < 2.0dev", + "google-cloud-core >= 1.0.3, < 2.0dev", "grpc-google-iam-v1 >= 0.12.3, < 0.13dev", ] extras = {} diff --git a/spanner/synth.metadata b/spanner/synth.metadata index 7646a8a309f1..285778e951a2 100644 --- a/spanner/synth.metadata +++ b/spanner/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-08-23T12:37:16.324493Z", + "updateTime": "2019-09-27T12:29:07.043834Z", "sources": [ { "generator": { "name": "artman", - "version": "0.34.0", - "dockerImage": "googleapis/artman@sha256:38a27ba6245f96c3e86df7acb2ebcc33b4f186d9e475efe2d64303aec3d4e0ea" + "version": "0.37.1", + "dockerImage": 
"googleapis/artman@sha256:6068f67900a3f0bdece596b97bda8fc70406ca0e137a941f4c81d3217c994a80" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "9c9f778aedde02f9826d2ae5d0f9c96409ba0f25", - "internalRef": "264996596" + "sha": "cd112d8d255e0099df053643d4bd12c228ef7b1b", + "internalRef": "271468707" } }, { diff --git a/spanner/tests/system/test_system.py b/spanner/tests/system/test_system.py index 730190444edf..abfd1297d7ce 100644 --- a/spanner/tests/system/test_system.py +++ b/spanner/tests/system/test_system.py @@ -24,9 +24,12 @@ import uuid import pytest +import grpc +from google.rpc import code_pb2 from google.api_core import exceptions from google.api_core.datetime_helpers import DatetimeWithNanoseconds + from google.cloud.spanner_v1 import param_types from google.cloud.spanner_v1.proto.type_pb2 import ARRAY from google.cloud.spanner_v1.proto.type_pb2 import BOOL @@ -64,6 +67,10 @@ COUNTERS_TABLE = "counters" COUNTERS_COLUMNS = ("name", "value") +_STATUS_CODE_TO_GRPC_STATUS_CODE = { + member.value[0]: member for member in grpc.StatusCode +} + class Config(object): """Run-time configuration to be modified at set-up. @@ -146,7 +153,13 @@ def test_reload_instance(self): # Make sure metadata unset before reloading. 
instance.display_name = None - instance.reload() + def _expected_display_name(instance): + return instance.display_name == Config.INSTANCE.display_name + + retry = RetryInstanceState(_expected_display_name) + + retry(instance.reload)() + self.assertEqual(instance.display_name, Config.INSTANCE.display_name) @unittest.skipUnless(CREATE_INSTANCE, "Skipping instance creation") @@ -776,6 +789,15 @@ def test_transaction_execute_update_then_insert_commit(self): # [END spanner_test_dml_update] # [END spanner_test_dml_with_mutation] + @staticmethod + def _check_batch_status(status_code, expected=code_pb2.OK): + if status_code != expected: + grpc_status_code = _STATUS_CODE_TO_GRPC_STATUS_CODE[status_code] + call = FauxCall(status_code) + raise exceptions.from_grpc_status( + grpc_status_code, "batch_update failed", errors=[call] + ) + def test_transaction_batch_update_success(self): # [START spanner_test_dml_with_mutation] # [START spanner_test_dml_update] @@ -808,7 +830,7 @@ def unit_of_work(transaction, self): status, row_counts = transaction.batch_update( [insert_statement, update_statement, delete_statement] ) - self.assertEqual(status.code, 0) # XXX: where are values defined? + self._check_batch_status(status.code) self.assertEqual(len(row_counts), 3) for row_count in row_counts: self.assertEqual(row_count, 1) @@ -849,7 +871,7 @@ def unit_of_work(transaction, self): status, row_counts = transaction.batch_update( insert_statements + update_statements ) - self.assertEqual(status.code, 0) # XXX: where are values defined? 
+ self._check_batch_status(status.code) self.assertEqual(len(row_counts), len(insert_statements) + 1) for row_count in row_counts: self.assertEqual(row_count, 1) @@ -886,18 +908,18 @@ def test_transaction_batch_update_w_syntax_error(self): {"contact_id": Type(code=INT64)}, ) - with session.transaction() as transaction: + def unit_of_work(transaction): rows = list(transaction.read(self.TABLE, self.COLUMNS, self.ALL)) self.assertEqual(rows, []) status, row_counts = transaction.batch_update( [insert_statement, update_statement, delete_statement] ) + self._check_batch_status(status.code, code_pb2.INVALID_ARGUMENT) + self.assertEqual(len(row_counts), 1) + self.assertEqual(row_counts[0], 1) - self.assertEqual(status.code, 3) # XXX: where are values defined? - self.assertEqual(len(row_counts), 1) - for row_count in row_counts: - self.assertEqual(row_count, 1) + session.run_in_transaction(unit_of_work) def test_transaction_batch_update_wo_statements(self): from google.api_core.exceptions import InvalidArgument @@ -2177,3 +2199,21 @@ def _handle_abort_unit_of_work(self, transaction): def handle_abort(self, database): database.run_in_transaction(self._handle_abort_unit_of_work) self.handler_done.set() + + +class FauxCall(object): + def __init__(self, code, details="FauxCall"): + self._code = code + self._details = details + + def initial_metadata(self): + return {} + + def trailing_metadata(self): + return {} + + def code(self): + return self._code + + def details(self): + return self._details diff --git a/spanner/tests/unit/gapic/v1/test_spanner_client_v1.py b/spanner/tests/unit/gapic/v1/test_spanner_client_v1.py index 3509a2d8c639..55610ee40967 100644 --- a/spanner/tests/unit/gapic/v1/test_spanner_client_v1.py +++ b/spanner/tests/unit/gapic/v1/test_spanner_client_v1.py @@ -119,12 +119,15 @@ def test_batch_create_sessions(self): # Setup Request database = client.database_path("[PROJECT]", "[INSTANCE]", "[DATABASE]") + session_count = 185691686 - response = 
client.batch_create_sessions(database) + response = client.batch_create_sessions(database, session_count) assert expected_response == response assert len(channel.requests) == 1 - expected_request = spanner_pb2.BatchCreateSessionsRequest(database=database) + expected_request = spanner_pb2.BatchCreateSessionsRequest( + database=database, session_count=session_count + ) actual_request = channel.requests[0][1] assert expected_request == actual_request @@ -138,9 +141,10 @@ def test_batch_create_sessions_exception(self): # Setup request database = client.database_path("[PROJECT]", "[INSTANCE]", "[DATABASE]") + session_count = 185691686 with pytest.raises(CustomException): - client.batch_create_sessions(database) + client.batch_create_sessions(database, session_count) def test_get_session(self): # Setup Expected Response diff --git a/spanner/tests/unit/test_client.py b/spanner/tests/unit/test_client.py index 8cef6313afe9..e42031cea4fb 100644 --- a/spanner/tests/unit/test_client.py +++ b/spanner/tests/unit/test_client.py @@ -55,6 +55,7 @@ def _constructor_test_helper( expected_creds=None, client_info=None, user_agent=None, + client_options=None, ): from google.cloud.spanner_v1 import client as MUT @@ -79,6 +80,7 @@ def _constructor_test_helper( self.assertEqual(client.project, self.PROJECT) self.assertIs(client._client_info, expected_client_info) self.assertEqual(client.user_agent, user_agent) + self.assertEqual(client._client_options, client_options) def test_constructor_default_scopes(self): from google.cloud.spanner_v1 import client as MUT @@ -130,8 +132,12 @@ def test_instance_admin_api(self): credentials = _make_credentials() client_info = mock.Mock() + client_options = mock.Mock() client = self._make_one( - project=self.PROJECT, credentials=credentials, client_info=client_info + project=self.PROJECT, + credentials=credentials, + client_info=client_info, + client_options=client_options, ) expected_scopes = (SPANNER_ADMIN_SCOPE,) @@ -146,7 +152,9 @@ def 
test_instance_admin_api(self): self.assertIs(again, api) instance_admin_client.assert_called_once_with( - credentials=credentials.with_scopes.return_value, client_info=client_info + credentials=credentials.with_scopes.return_value, + client_info=client_info, + client_options=client_options, ) credentials.with_scopes.assert_called_once_with(expected_scopes) @@ -156,8 +164,12 @@ def test_database_admin_api(self): credentials = _make_credentials() client_info = mock.Mock() + client_options = mock.Mock() client = self._make_one( - project=self.PROJECT, credentials=credentials, client_info=client_info + project=self.PROJECT, + credentials=credentials, + client_info=client_info, + client_options=client_options, ) expected_scopes = (SPANNER_ADMIN_SCOPE,) @@ -172,7 +184,9 @@ def test_database_admin_api(self): self.assertIs(again, api) database_admin_client.assert_called_once_with( - credentials=credentials.with_scopes.return_value, client_info=client_info + credentials=credentials.with_scopes.return_value, + client_info=client_info, + client_options=client_options, ) credentials.with_scopes.assert_called_once_with(expected_scopes) diff --git a/spanner/tests/unit/test_database.py b/spanner/tests/unit/test_database.py index e553e0bbb8dc..f6f367e00161 100644 --- a/spanner/tests/unit/test_database.py +++ b/spanner/tests/unit/test_database.py @@ -233,6 +233,7 @@ def test_name_property(self): def test_spanner_api_property_w_scopeless_creds(self): client = _Client() client_info = client._client_info = mock.Mock() + client_options = client._client_options = mock.Mock() credentials = client.credentials = object() instance = _Instance(self.INSTANCE_NAME, client=client) pool = _Pool() @@ -250,7 +251,9 @@ def test_spanner_api_property_w_scopeless_creds(self): self.assertIs(again, api) spanner_client.assert_called_once_with( - credentials=credentials, client_info=client_info + credentials=credentials, + client_info=client_info, + client_options=client_options, ) def 
test_spanner_api_w_scoped_creds(self): @@ -271,6 +274,7 @@ def with_scopes(self, scopes): expected_scopes = (SPANNER_DATA_SCOPE,) client = _Client() client_info = client._client_info = mock.Mock() + client_options = client._client_options = mock.Mock() credentials = client.credentials = _CredentialsWithScopes() instance = _Instance(self.INSTANCE_NAME, client=client) pool = _Pool() @@ -291,6 +295,7 @@ def with_scopes(self, scopes): called_args, called_kw = spanner_client.call_args self.assertEqual(called_args, ()) self.assertEqual(called_kw["client_info"], client_info) + self.assertEqual(called_kw["client_options"], client_options) scoped = called_kw["credentials"] self.assertEqual(scoped._scopes, expected_scopes) self.assertIs(scoped._source, credentials) diff --git a/spanner/tests/unit/test_pool.py b/spanner/tests/unit/test_pool.py index 549044b1f423..eded02ea4e6d 100644 --- a/spanner/tests/unit/test_pool.py +++ b/spanner/tests/unit/test_pool.py @@ -156,8 +156,10 @@ def test_bind(self): self.assertEqual(pool.default_timeout, 10) self.assertTrue(pool._sessions.full()) + api = database.spanner_api + self.assertEqual(api.batch_create_sessions.call_count, 5) for session in SESSIONS: - self.assertTrue(session._created) + session.create.assert_not_called() def test_get_non_expired(self): pool = self._make_one(size=4) @@ -183,7 +185,7 @@ def test_get_expired(self): session = pool.get() self.assertIs(session, SESSIONS[4]) - self.assertTrue(session._created) + session.create.assert_called() self.assertTrue(SESSIONS[0]._exists_checked) self.assertFalse(pool._sessions.full()) @@ -243,8 +245,10 @@ def test_clear(self): pool.bind(database) self.assertTrue(pool._sessions.full()) + api = database.spanner_api + self.assertEqual(api.batch_create_sessions.call_count, 5) for session in SESSIONS: - self.assertTrue(session._created) + session.create.assert_not_called() pool.clear() @@ -286,7 +290,7 @@ def test_get_empty(self): self.assertIsInstance(session, _Session) 
self.assertIs(session._database, database) - self.assertTrue(session._created) + session.create.assert_called() self.assertTrue(pool._sessions.empty()) def test_get_non_empty_session_exists(self): @@ -299,7 +303,7 @@ def test_get_non_empty_session_exists(self): session = pool.get() self.assertIs(session, previous) - self.assertFalse(session._created) + session.create.assert_not_called() self.assertTrue(session._exists_checked) self.assertTrue(pool._sessions.empty()) @@ -316,7 +320,7 @@ def test_get_non_empty_session_expired(self): self.assertTrue(previous._exists_checked) self.assertIs(session, newborn) - self.assertTrue(session._created) + session.create.assert_called() self.assertFalse(session._exists_checked) self.assertTrue(pool._sessions.empty()) @@ -405,7 +409,6 @@ def test_bind(self): database = _Database("name") SESSIONS = [_Session(database)] * 10 database._sessions.extend(SESSIONS) - pool.bind(database) self.assertIs(pool._database, database) @@ -414,8 +417,10 @@ def test_bind(self): self.assertEqual(pool._delta.seconds, 3000) self.assertTrue(pool._sessions.full()) + api = database.spanner_api + self.assertEqual(api.batch_create_sessions.call_count, 5) for session in SESSIONS: - self.assertTrue(session._created) + session.create.assert_not_called() def test_get_hit_no_ping(self): pool = self._make_one(size=4) @@ -470,7 +475,7 @@ def test_get_hit_w_ping_expired(self): session = pool.get() self.assertIs(session, SESSIONS[4]) - self.assertTrue(session._created) + session.create.assert_called() self.assertTrue(SESSIONS[0]._exists_checked) self.assertFalse(pool._sessions.full()) @@ -538,8 +543,10 @@ def test_clear(self): pool.bind(database) self.assertTrue(pool._sessions.full()) + api = database.spanner_api + self.assertEqual(api.batch_create_sessions.call_count, 5) for session in SESSIONS: - self.assertTrue(session._created) + session.create.assert_not_called() pool.clear() @@ -595,7 +602,7 @@ def test_ping_oldest_stale_and_not_exists(self): pool.ping() 
self.assertTrue(SESSIONS[0]._exists_checked) - self.assertTrue(SESSIONS[1]._created) + SESSIONS[1].create.assert_called() class TestTransactionPingingPool(unittest.TestCase): @@ -635,7 +642,6 @@ def test_bind(self): database = _Database("name") SESSIONS = [_Session(database) for _ in range(10)] database._sessions.extend(SESSIONS) - pool.bind(database) self.assertIs(pool._database, database) @@ -644,8 +650,10 @@ def test_bind(self): self.assertEqual(pool._delta.seconds, 3000) self.assertTrue(pool._sessions.full()) + api = database.spanner_api + self.assertEqual(api.batch_create_sessions.call_count, 5) for session in SESSIONS: - self.assertTrue(session._created) + session.create.assert_not_called() txn = session._transaction self.assertTrue(txn._begun) @@ -671,8 +679,10 @@ def test_bind_w_timestamp_race(self): self.assertEqual(pool._delta.seconds, 3000) self.assertTrue(pool._sessions.full()) + api = database.spanner_api + self.assertEqual(api.batch_create_sessions.call_count, 5) for session in SESSIONS: - self.assertTrue(session._created) + session.create.assert_not_called() txn = session._transaction self.assertTrue(txn._begun) @@ -843,16 +853,13 @@ def __init__(self, database, exists=True, transaction=None): self._database = database self._exists = exists self._exists_checked = False - self._created = False + self.create = mock.Mock() self._deleted = False self._transaction = transaction def __lt__(self, other): return id(self) < id(other) - def create(self): - self._created = True - def exists(self): self._exists_checked = True return self._exists @@ -874,6 +881,22 @@ def __init__(self, name): self.name = name self._sessions = [] + def mock_batch_create_sessions(db, session_count=10, timeout=10, metadata=[]): + from google.cloud.spanner_v1.proto import spanner_pb2 + + response = spanner_pb2.BatchCreateSessionsResponse() + if session_count < 2: + response.session.add() + else: + response.session.add() + response.session.add() + return response + + from 
google.cloud.spanner_v1.gapic.spanner_client import SpannerClient + + self.spanner_api = mock.create_autospec(SpannerClient, instance=True) + self.spanner_api.batch_create_sessions.side_effect = mock_batch_create_sessions + def session(self): return self._sessions.pop() diff --git a/speech/docs/conf.py b/speech/docs/conf.py index c5561bbeb3d1..0f99cdc6cb73 100644 --- a/speech/docs/conf.py +++ b/speech/docs/conf.py @@ -338,7 +338,7 @@ "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://2.python-requests.org/en/master/", None), + "requests": ("https://requests.kennethreitz.org/en/stable/", None), "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } diff --git a/speech/docs/gapic/v1/api.rst b/speech/docs/gapic/v1/api.rst index 45658eda89a2..a04f18ad6ba1 100644 --- a/speech/docs/gapic/v1/api.rst +++ b/speech/docs/gapic/v1/api.rst @@ -1,5 +1,5 @@ -Client for Cloud Speech API -=========================== +Client for Cloud Speech-to-Text API +=================================== .. automodule:: google.cloud.speech_v1 :members: diff --git a/speech/docs/gapic/v1/types.rst b/speech/docs/gapic/v1/types.rst index 6083c398def7..71a61184ef32 100644 --- a/speech/docs/gapic/v1/types.rst +++ b/speech/docs/gapic/v1/types.rst @@ -1,5 +1,5 @@ -Types for Cloud Speech API Client -================================= +Types for Cloud Speech-to-Text API Client +========================================= .. 
automodule:: google.cloud.speech_v1.types :members: \ No newline at end of file diff --git a/speech/docs/gapic/v1p1beta1/api.rst b/speech/docs/gapic/v1p1beta1/api.rst index 4779930fdd9e..9493c970c2bc 100644 --- a/speech/docs/gapic/v1p1beta1/api.rst +++ b/speech/docs/gapic/v1p1beta1/api.rst @@ -1,5 +1,5 @@ -Client for Cloud Speech API -=========================== +Client for Cloud Speech-to-Text API +=================================== .. automodule:: google.cloud.speech_v1p1beta1 :members: diff --git a/speech/docs/gapic/v1p1beta1/types.rst b/speech/docs/gapic/v1p1beta1/types.rst index ce33f105e230..6ce93d567607 100644 --- a/speech/docs/gapic/v1p1beta1/types.rst +++ b/speech/docs/gapic/v1p1beta1/types.rst @@ -1,5 +1,5 @@ -Types for Cloud Speech API Client -================================= +Types for Cloud Speech-to-Text API Client +========================================= .. automodule:: google.cloud.speech_v1p1beta1.types :members: \ No newline at end of file diff --git a/speech/google/cloud/speech_v1/gapic/enums.py b/speech/google/cloud/speech_v1/gapic/enums.py index ebf2fc95619f..aff388c7d8db 100644 --- a/speech/google/cloud/speech_v1/gapic/enums.py +++ b/speech/google/cloud/speech_v1/gapic/enums.py @@ -24,14 +24,16 @@ class AudioEncoding(enum.IntEnum): """ The encoding of the audio data sent in the request. - All encodings support only 1 channel (mono) audio. + All encodings support only 1 channel (mono) audio, unless the + ``audio_channel_count`` and ``enable_separate_recognition_per_channel`` + fields are set. For best results, the audio source should be captured and transmitted using a lossless encoding (``FLAC`` or ``LINEAR16``). The accuracy of the speech recognition can be reduced if lossy codecs are used to capture or transmit audio, particularly if background noise is present. - Lossy codecs include ``MULAW``, ``AMR``, ``AMR_WB``, ``OGG_OPUS``, and - ``SPEEX_WITH_HEADER_BYTE``. 
+ Lossy codecs include ``MULAW``, ``AMR``, ``AMR_WB``, ``OGG_OPUS``, + ``SPEEX_WITH_HEADER_BYTE``, and ``MP3``. The ``FLAC`` and ``WAV`` audio file formats include a header that describes the included audio content. You can request recognition for diff --git a/speech/google/cloud/speech_v1/gapic/speech_client.py b/speech/google/cloud/speech_v1/gapic/speech_client.py index b5bddb0edff8..ee9dae675db4 100644 --- a/speech/google/cloud/speech_v1/gapic/speech_client.py +++ b/speech/google/cloud/speech_v1/gapic/speech_client.py @@ -212,12 +212,12 @@ def recognize( >>> response = client.recognize(config, audio) Args: - config (Union[dict, ~google.cloud.speech_v1.types.RecognitionConfig]): *Required* Provides information to the recognizer that specifies how to + config (Union[dict, ~google.cloud.speech_v1.types.RecognitionConfig]): Required. Provides information to the recognizer that specifies how to process the request. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.speech_v1.types.RecognitionConfig` - audio (Union[dict, ~google.cloud.speech_v1.types.RecognitionAudio]): *Required* The audio data to be recognized. + audio (Union[dict, ~google.cloud.speech_v1.types.RecognitionAudio]): Required. The audio data to be recognized. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.speech_v1.types.RecognitionAudio` @@ -268,7 +268,9 @@ def long_running_recognize( Performs asynchronous speech recognition: receive results via the google.longrunning.Operations interface. Returns either an ``Operation.error`` or an ``Operation.response`` which contains a - ``LongRunningRecognizeResponse`` message. + ``LongRunningRecognizeResponse`` message. For more information on + asynchronous speech recognition, see the + `how-to `__. 
Example: >>> from google.cloud import speech_v1 @@ -295,12 +297,12 @@ def long_running_recognize( >>> metadata = response.metadata() Args: - config (Union[dict, ~google.cloud.speech_v1.types.RecognitionConfig]): *Required* Provides information to the recognizer that specifies how to + config (Union[dict, ~google.cloud.speech_v1.types.RecognitionConfig]): Required. Provides information to the recognizer that specifies how to process the request. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.speech_v1.types.RecognitionConfig` - audio (Union[dict, ~google.cloud.speech_v1.types.RecognitionAudio]): *Required* The audio data to be recognized. + audio (Union[dict, ~google.cloud.speech_v1.types.RecognitionAudio]): Required. The audio data to be recognized. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.speech_v1.types.RecognitionAudio` diff --git a/speech/google/cloud/speech_v1/gapic/speech_client_config.py b/speech/google/cloud/speech_v1/gapic/speech_client_config.py index b54aed26bed8..ff4aeac79b04 100644 --- a/speech/google/cloud/speech_v1/gapic/speech_client_config.py +++ b/speech/google/cloud/speech_v1/gapic/speech_client_config.py @@ -10,26 +10,26 @@ "initial_retry_delay_millis": 100, "retry_delay_multiplier": 1.3, "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 1000000, + "initial_rpc_timeout_millis": 20000, "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 1000000, - "total_timeout_millis": 5000000, + "max_rpc_timeout_millis": 20000, + "total_timeout_millis": 600000, } }, "methods": { "Recognize": { - "timeout_millis": 200000, - "retry_codes_name": "idempotent", + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, "LongRunningRecognize": { - "timeout_millis": 200000, + "timeout_millis": 60000, "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, "StreamingRecognize": { - 
"timeout_millis": 200000, - "retry_codes_name": "idempotent", + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, }, diff --git a/speech/google/cloud/speech_v1/gapic/transports/speech_grpc_transport.py b/speech/google/cloud/speech_v1/gapic/transports/speech_grpc_transport.py index c6eec5114f4c..ee71e2b81e10 100644 --- a/speech/google/cloud/speech_v1/gapic/transports/speech_grpc_transport.py +++ b/speech/google/cloud/speech_v1/gapic/transports/speech_grpc_transport.py @@ -134,7 +134,9 @@ def long_running_recognize(self): Performs asynchronous speech recognition: receive results via the google.longrunning.Operations interface. Returns either an ``Operation.error`` or an ``Operation.response`` which contains a - ``LongRunningRecognizeResponse`` message. + ``LongRunningRecognizeResponse`` message. For more information on + asynchronous speech recognition, see the + `how-to `__. Returns: Callable: A callable which accepts the appropriate diff --git a/speech/google/cloud/speech_v1/proto/cloud_speech.proto b/speech/google/cloud/speech_v1/proto/cloud_speech.proto index 90ff515b5f76..9553510ac8ac 100644 --- a/speech/google/cloud/speech_v1/proto/cloud_speech.proto +++ b/speech/google/cloud/speech_v1/proto/cloud_speech.proto @@ -1,4 +1,4 @@ -// Copyright 2018 Google LLC. +// Copyright 2019 Google LLC. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -18,7 +18,10 @@ syntax = "proto3"; package google.cloud.speech.v1; import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; import "google/longrunning/operations.proto"; +import "google/protobuf/any.proto"; import "google/protobuf/duration.proto"; import "google/protobuf/timestamp.proto"; import "google/rpc/status.proto"; @@ -28,9 +31,13 @@ option go_package = "google.golang.org/genproto/googleapis/cloud/speech/v1;speec option java_multiple_files = true; option java_outer_classname = "SpeechProto"; option java_package = "com.google.cloud.speech.v1"; +option objc_class_prefix = "GCS"; // Service that implements Google Cloud Speech API. service Speech { + option (google.api.default_host) = "speech.googleapis.com"; + option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; + // Performs synchronous speech recognition: receive results after all audio // has been sent and processed. rpc Recognize(RecognizeRequest) returns (RecognizeResponse) { @@ -38,52 +45,59 @@ service Speech { post: "/v1/speech:recognize" body: "*" }; + option (google.api.method_signature) = "config,audio"; } // Performs asynchronous speech recognition: receive results via the // google.longrunning.Operations interface. Returns either an // `Operation.error` or an `Operation.response` which contains // a `LongRunningRecognizeResponse` message. - rpc LongRunningRecognize(LongRunningRecognizeRequest) - returns (google.longrunning.Operation) { + // For more information on asynchronous speech recognition, see the + // [how-to](https://cloud.google.com/speech-to-text/docs/async-recognize). 
+ rpc LongRunningRecognize(LongRunningRecognizeRequest) returns (google.longrunning.Operation) { option (google.api.http) = { post: "/v1/speech:longrunningrecognize" body: "*" }; + option (google.api.method_signature) = "config,audio"; + option (google.longrunning.operation_info) = { + response_type: "LongRunningRecognizeResponse" + metadata_type: "LongRunningRecognizeMetadata" + }; } // Performs bidirectional streaming speech recognition: receive results while // sending audio. This method is only available via the gRPC API (not REST). - rpc StreamingRecognize(stream StreamingRecognizeRequest) - returns (stream StreamingRecognizeResponse) {} + rpc StreamingRecognize(stream StreamingRecognizeRequest) returns (stream StreamingRecognizeResponse) { + } } // The top-level message sent by the client for the `Recognize` method. message RecognizeRequest { - // *Required* Provides information to the recognizer that specifies how to + // Required. Provides information to the recognizer that specifies how to // process the request. - RecognitionConfig config = 1; + RecognitionConfig config = 1 [(google.api.field_behavior) = REQUIRED]; - // *Required* The audio data to be recognized. - RecognitionAudio audio = 2; + // Required. The audio data to be recognized. + RecognitionAudio audio = 2 [(google.api.field_behavior) = REQUIRED]; } // The top-level message sent by the client for the `LongRunningRecognize` // method. message LongRunningRecognizeRequest { - // *Required* Provides information to the recognizer that specifies how to + // Required. Provides information to the recognizer that specifies how to // process the request. - RecognitionConfig config = 1; + RecognitionConfig config = 1 [(google.api.field_behavior) = REQUIRED]; - // *Required* The audio data to be recognized. - RecognitionAudio audio = 2; + // Required. The audio data to be recognized. 
+ RecognitionAudio audio = 2 [(google.api.field_behavior) = REQUIRED]; } // The top-level message sent by the client for the `StreamingRecognize` method. // Multiple `StreamingRecognizeRequest` messages are sent. The first message -// must contain a `streaming_config` message and must not contain `audio` data. -// All subsequent messages must contain `audio` data and must not contain a -// `streaming_config` message. +// must contain a `streaming_config` message and must not contain +// `audio_content`. All subsequent messages must contain `audio_content` and +// must not contain a `streaming_config` message. message StreamingRecognizeRequest { // The streaming request, which is either a streaming config or audio content. oneof streaming_request { @@ -97,9 +111,9 @@ message StreamingRecognizeRequest { // `StreamingRecognizeRequest` message must not contain `audio_content` data // and all subsequent `StreamingRecognizeRequest` messages must contain // `audio_content` data. The audio bytes must be encoded as specified in - // `RecognitionConfig`. Note: as with all bytes fields, protobuffers use a + // `RecognitionConfig`. Note: as with all bytes fields, proto buffers use a // pure binary representation (not base64). See - // [content limits](/speech-to-text/quotas#content). + // [content limits](https://cloud.google.com/speech-to-text/quotas#content). bytes audio_content = 2; } } @@ -107,11 +121,11 @@ message StreamingRecognizeRequest { // Provides information to the recognizer that specifies how to process the // request. message StreamingRecognitionConfig { - // *Required* Provides information to the recognizer that specifies how to + // Required. Provides information to the recognizer that specifies how to // process the request. 
- RecognitionConfig config = 1; + RecognitionConfig config = 1 [(google.api.field_behavior) = REQUIRED]; - // *Optional* If `false` or omitted, the recognizer will perform continuous + // If `false` or omitted, the recognizer will perform continuous // recognition (continuing to wait for and process audio even if the user // pauses speaking) until the client closes the input stream (gRPC API) or // until the maximum time limit has been reached. May return multiple @@ -124,7 +138,7 @@ message StreamingRecognitionConfig { // `true`. bool single_utterance = 2; - // *Optional* If `true`, interim results (tentative hypotheses) may be + // If `true`, interim results (tentative hypotheses) may be // returned as they become available (these interim results are indicated with // the `is_final=false` flag). // If `false` or omitted, only `is_final=true` result(s) are returned. @@ -136,13 +150,15 @@ message StreamingRecognitionConfig { message RecognitionConfig { // The encoding of the audio data sent in the request. // - // All encodings support only 1 channel (mono) audio. + // All encodings support only 1 channel (mono) audio, unless the + // `audio_channel_count` and `enable_separate_recognition_per_channel` fields + // are set. // // For best results, the audio source should be captured and transmitted using // a lossless encoding (`FLAC` or `LINEAR16`). The accuracy of the speech // recognition can be reduced if lossy codecs are used to capture or transmit // audio, particularly if background noise is present. Lossy codecs include - // `MULAW`, `AMR`, `AMR_WB`, `OGG_OPUS`, and `SPEEX_WITH_HEADER_BYTE`. + // `MULAW`, `AMR`, `AMR_WB`, `OGG_OPUS`, `SPEEX_WITH_HEADER_BYTE`, and `MP3`. // // The `FLAC` and `WAV` audio file formats include a header that describes the // included audio content. 
You can request recognition for `WAV` files that @@ -153,8 +169,7 @@ message RecognitionConfig { // an `AudioEncoding` when you send send `FLAC` or `WAV` audio, the // encoding configuration must match the encoding described in the audio // header; otherwise the request returns an - // [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT] error - // code. + // [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT] error code. enum AudioEncoding { // Not specified. ENCODING_UNSPECIFIED = 0; @@ -202,8 +217,7 @@ message RecognitionConfig { // Encoding of audio data sent in all `RecognitionAudio` messages. // This field is optional for `FLAC` and `WAV` audio files and required - // for all other audio formats. For details, see - // [AudioEncoding][google.cloud.speech.v1.RecognitionConfig.AudioEncoding]. + // for all other audio formats. For details, see [AudioEncoding][google.cloud.speech.v1.RecognitionConfig.AudioEncoding]. AudioEncoding encoding = 1; // Sample rate in Hertz of the audio data sent in all @@ -211,12 +225,11 @@ message RecognitionConfig { // 16000 is optimal. For best results, set the sampling rate of the audio // source to 16000 Hz. If that's not possible, use the native sample rate of // the audio source (instead of re-sampling). - // This field is optional for `FLAC` and `WAV` audio files and required - // for all other audio formats. For details, see - // [AudioEncoding][google.cloud.speech.v1.RecognitionConfig.AudioEncoding]. + // This field is optional for FLAC and WAV audio files, but is + // required for all other audio formats. For details, see [AudioEncoding][google.cloud.speech.v1.RecognitionConfig.AudioEncoding]. int32 sample_rate_hertz = 2; - // *Optional* The number of channels in the input audio data. + // The number of channels in the input audio data. // ONLY set this for MULTI-CHANNEL recognition. // Valid values for LINEAR16 and FLAC are `1`-`8`. // Valid values for OGG_OPUS are '1'-'254'. 
@@ -235,14 +248,15 @@ message RecognitionConfig { // `audio_channel_count` multiplied by the length of the audio. bool enable_separate_recognition_per_channel = 12; - // *Required* The language of the supplied audio as a + // Required. The language of the supplied audio as a // [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tag. // Example: "en-US". - // See [Language Support](/speech-to-text/docs/languages) - // for a list of the currently supported language codes. - string language_code = 3; + // See [Language + // Support](https://cloud.google.com/speech-to-text/docs/languages) for a list + // of the currently supported language codes. + string language_code = 3 [(google.api.field_behavior) = REQUIRED]; - // *Optional* Maximum number of recognition hypotheses to be returned. + // Maximum number of recognition hypotheses to be returned. // Specifically, the maximum number of `SpeechRecognitionAlternative` messages // within each `SpeechRecognitionResult`. // The server may return fewer than `max_alternatives`. @@ -250,24 +264,26 @@ message RecognitionConfig { // one. If omitted, will return a maximum of one. int32 max_alternatives = 4; - // *Optional* If set to `true`, the server will attempt to filter out + // If set to `true`, the server will attempt to filter out // profanities, replacing all but the initial character in each filtered word // with asterisks, e.g. "f***". If set to `false` or omitted, profanities // won't be filtered out. bool profanity_filter = 5; - // *Optional* array of [SpeechContext][google.cloud.speech.v1.SpeechContext]. + // Array of [SpeechContext][google.cloud.speech.v1.SpeechContext]. // A means to provide context to assist the speech recognition. For more - // information, see [Phrase Hints](/speech-to-text/docs/basics#phrase-hints). + // information, see + // [speech + // adaptation](https://cloud.google.com/speech-to-text/docs/context-strength). 
repeated SpeechContext speech_contexts = 6; - // *Optional* If `true`, the top result includes a list of words and + // If `true`, the top result includes a list of words and // the start and end time offsets (timestamps) for those words. If // `false`, no word-level time offset information is returned. The default is // `false`. bool enable_word_time_offsets = 8; - // *Optional* If 'true', adds punctuation to recognition result hypotheses. + // If 'true', adds punctuation to recognition result hypotheses. // This feature is only available in select languages. Setting this for // requests in other languages has no effect at all. // The default 'false' value does not add punctuation to result hypotheses. @@ -276,10 +292,20 @@ message RecognitionConfig { // premium feature. bool enable_automatic_punctuation = 11; - // *Optional* Metadata regarding this request. + // Config to enable speaker diarization and set additional + // parameters to make diarization better suited for your application. + // Note: When this is enabled, we send all the words from the beginning of the + // audio for the top alternative in every consecutive STREAMING responses. + // This is done in order to improve our speaker tags as our models learn to + // identify the speakers in the conversation over time. + // For non-streaming requests, the diarization results will be provided only + // in the top alternative of the FINAL SpeechRecognitionResult. + SpeakerDiarizationConfig diarization_config = 19; + + // Metadata regarding this request. RecognitionMetadata metadata = 9; - // *Optional* Which model to select for the given request. Select the model + // Which model to select for the given request. Select the model // best suited to your domain to get best results. If a model is not // explicitly specified, then we auto-select a model based on the parameters // in the RecognitionConfig. 
@@ -313,7 +339,7 @@ message RecognitionConfig { // string model = 13; - // *Optional* Set to true to use an enhanced model for speech recognition. + // Set to true to use an enhanced model for speech recognition. // If `use_enhanced` is set to true and the `model` field is not set, then // an appropriate enhanced model is chosen if an enhanced model exists for // the audio. @@ -324,6 +350,24 @@ message RecognitionConfig { bool use_enhanced = 14; } +// Config to enable speaker diarization. +message SpeakerDiarizationConfig { + // If 'true', enables speaker detection for each recognized word in + // the top alternative of the recognition result using a speaker_tag provided + // in the WordInfo. + bool enable_speaker_diarization = 1; + + // Minimum number of speakers in the conversation. This range gives you more + // flexibility by allowing the system to automatically determine the correct + // number of speakers. If not set, the default value is 2. + int32 min_speaker_count = 2; + + // Maximum number of speakers in the conversation. This range gives you more + // flexibility by allowing the system to automatically determine the correct + // number of speakers. If not set, the default value is 6. + int32 max_speaker_count = 3; +} + // Description of audio data to be recognized. message RecognitionMetadata { // Use case categories that the audio recognition request can be described @@ -364,15 +408,6 @@ message RecognitionMetadata { DICTATION = 8; } - // The use case most closely describing the audio content to be recognized. - InteractionType interaction_type = 1; - - // The industry vertical to which this speech recognition request most - // closely applies. This is most indicative of the topics contained - // in the audio. Use the 6-digit NAICS code to identify the industry - // vertical - see https://www.naics.com/search/. - uint32 industry_naics_code_of_audio = 3; - // Enumerates the types of capture settings describing an audio file. 
enum MicrophoneDistance { // Audio type is not known. @@ -390,9 +425,6 @@ message RecognitionMetadata { FARFIELD = 3; } - // The audio type that most closely describes the audio being recognized. - MicrophoneDistance microphone_distance = 4; - // The original media the speech was recorded on. enum OriginalMediaType { // Unknown original media type. @@ -405,9 +437,6 @@ message RecognitionMetadata { VIDEO = 2; } - // The original media the speech was recorded on. - OriginalMediaType original_media_type = 5; - // The type of device the speech was recorded with. enum RecordingDeviceType { // The recording device is unknown. @@ -432,6 +461,21 @@ message RecognitionMetadata { OTHER_INDOOR_DEVICE = 6; } + // The use case most closely describing the audio content to be recognized. + InteractionType interaction_type = 1; + + // The industry vertical to which this speech recognition request most + // closely applies. This is most indicative of the topics contained + // in the audio. Use the 6-digit NAICS code to identify the industry + // vertical - see https://www.naics.com/search/. + uint32 industry_naics_code_of_audio = 3; + + // The audio type that most closely describes the audio being recognized. + MicrophoneDistance microphone_distance = 4; + + // The original media the speech was recorded on. + OriginalMediaType original_media_type = 5; + // The type of device the speech was recorded with. RecordingDeviceType recording_device_type = 6; @@ -454,25 +498,31 @@ message RecognitionMetadata { // Provides "hints" to the speech recognizer to favor specific words and phrases // in the results. message SpeechContext { - // *Optional* A list of strings containing words and phrases "hints" so that + // A list of strings containing words and phrases "hints" so that // the speech recognition is more likely to recognize them. This can be used // to improve the accuracy for specific words and phrases, for example, if // specific commands are typically spoken by the user. 
This can also be used // to add additional words to the vocabulary of the recognizer. See - // [usage limits](/speech-to-text/quotas#content). + // [usage limits](https://cloud.google.com/speech-to-text/quotas#content). + // + // List items can also be set to classes for groups of words that represent + // common concepts that occur in natural language. For example, rather than + // providing phrase hints for every month of the year, using the $MONTH class + // improves the likelihood of correctly transcribing audio that includes + // months. repeated string phrases = 1; } // Contains audio data in the encoding specified in the `RecognitionConfig`. // Either `content` or `uri` must be supplied. Supplying both or neither -// returns [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]. -// See [content limits](/speech-to-text/quotas#content). +// returns [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]. See +// [content limits](https://cloud.google.com/speech-to-text/quotas#content). message RecognitionAudio { // The audio source, which is either inline content or a Google Cloud // Storage uri. oneof audio_source { // The audio data bytes encoded as specified in - // `RecognitionConfig`. Note: as with all bytes fields, protobuffers use a + // `RecognitionConfig`. Note: as with all bytes fields, proto buffers use a // pure binary representation, whereas JSON representations use base64. bytes content = 1; @@ -481,9 +531,8 @@ message RecognitionAudio { // Currently, only Google Cloud Storage URIs are // supported, which must be specified in the following format: // `gs://bucket_name/object_name` (other URI formats return - // [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]). - // For more information, see [Request - // URIs](https://cloud.google.com/storage/docs/reference-uris). + // [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]). 
For more information, see + // [Request URIs](https://cloud.google.com/storage/docs/reference-uris). string uri = 2; } } @@ -492,7 +541,7 @@ message RecognitionAudio { // contains the result as zero or more sequential `SpeechRecognitionResult` // messages. message RecognizeResponse { - // Output only. Sequential list of transcription results corresponding to + // Sequential list of transcription results corresponding to // sequential portions of audio. repeated SpeechRecognitionResult results = 2; } @@ -503,7 +552,7 @@ message RecognizeResponse { // returned by the `GetOperation` call of the `google::longrunning::Operations` // service. message LongRunningRecognizeResponse { - // Output only. Sequential list of transcription results corresponding to + // Sequential list of transcription results corresponding to // sequential portions of audio. repeated SpeechRecognitionResult results = 2; } @@ -588,44 +637,44 @@ message StreamingRecognizeResponse { END_OF_SINGLE_UTTERANCE = 1; } - // Output only. If set, returns a [google.rpc.Status][google.rpc.Status] - // message that specifies the error for the operation. + // If set, returns a [google.rpc.Status][google.rpc.Status] message that + // specifies the error for the operation. google.rpc.Status error = 1; - // Output only. This repeated list contains zero or more results that + // This repeated list contains zero or more results that // correspond to consecutive portions of the audio currently being processed. // It contains zero or one `is_final=true` result (the newly settled portion), // followed by zero or more `is_final=false` results (the interim results). repeated StreamingRecognitionResult results = 2; - // Output only. Indicates the type of speech event. + // Indicates the type of speech event. SpeechEventType speech_event_type = 4; } // A streaming speech recognition result corresponding to a portion of the audio // that is currently being processed. message StreamingRecognitionResult { - // Output only. 
May contain one or more recognition hypotheses (up to the + // May contain one or more recognition hypotheses (up to the // maximum specified in `max_alternatives`). // These alternatives are ordered in terms of accuracy, with the top (first) // alternative being the most probable, as ranked by the recognizer. repeated SpeechRecognitionAlternative alternatives = 1; - // Output only. If `false`, this `StreamingRecognitionResult` represents an + // If `false`, this `StreamingRecognitionResult` represents an // interim result that may change. If `true`, this is the final time the // speech service will return this particular `StreamingRecognitionResult`, // the recognizer will not return any further hypotheses for this portion of // the transcript and corresponding audio. bool is_final = 2; - // Output only. An estimate of the likelihood that the recognizer will not + // An estimate of the likelihood that the recognizer will not // change its guess about this interim result. Values range from 0.0 // (completely unstable) to 1.0 (completely stable). // This field is only provided for interim results (`is_final=false`). // The default of 0.0 is a sentinel value indicating `stability` was not set. float stability = 3; - // Output only. Time offset of the end of this result relative to the + // Time offset of the end of this result relative to the // beginning of the audio. google.protobuf.Duration result_end_time = 4; @@ -634,16 +683,16 @@ message StreamingRecognitionResult { // For audio_channel_count = N, its output values can range from '1' to 'N'. int32 channel_tag = 5; - // Output only. The - // [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tag of the - // language in this result. This language code was detected to have the most - // likelihood of being spoken in the audio. - string language_code = 6; + // The [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tag of + // the language in this result. 
This language code was detected to have the + // most likelihood of being spoken in the audio. + string language_code = 6 + [(google.api.field_behavior) = OUTPUT_ONLY]; } // A speech recognition result corresponding to a portion of the audio. message SpeechRecognitionResult { - // Output only. May contain one or more recognition hypotheses (up to the + // May contain one or more recognition hypotheses (up to the // maximum specified in `max_alternatives`). // These alternatives are ordered in terms of accuracy, with the top (first) // alternative being the most probable, as ranked by the recognizer. @@ -657,10 +706,10 @@ message SpeechRecognitionResult { // Alternative hypotheses (a.k.a. n-best list). message SpeechRecognitionAlternative { - // Output only. Transcript text representing the words that the user spoke. + // Transcript text representing the words that the user spoke. string transcript = 1; - // Output only. The confidence estimate between 0.0 and 1.0. A higher number + // The confidence estimate between 0.0 and 1.0. A higher number // indicates an estimated greater likelihood that the recognized words are // correct. This field is set only for the top alternative of a non-streaming // result or, of a streaming result where `is_final=true`. @@ -669,7 +718,7 @@ message SpeechRecognitionAlternative { // The default of 0.0 is a sentinel value indicating `confidence` was not set. float confidence = 2; - // Output only. A list of word-specific information for each recognized word. + // A list of word-specific information for each recognized word. // Note: When `enable_speaker_diarization` is true, you will see all the words // from the beginning of the audio. repeated WordInfo words = 3; @@ -677,7 +726,7 @@ message SpeechRecognitionAlternative { // Word-specific information for recognized words. message WordInfo { - // Output only. 
Time offset relative to the beginning of the audio, + // Time offset relative to the beginning of the audio, // and corresponding to the start of the spoken word. // This field is only set if `enable_word_time_offsets=true` and only // in the top hypothesis. @@ -685,7 +734,7 @@ message WordInfo { // vary. google.protobuf.Duration start_time = 1; - // Output only. Time offset relative to the beginning of the audio, + // Time offset relative to the beginning of the audio, // and corresponding to the end of the spoken word. // This field is only set if `enable_word_time_offsets=true` and only // in the top hypothesis. @@ -693,6 +742,14 @@ message WordInfo { // vary. google.protobuf.Duration end_time = 2; - // Output only. The word corresponding to this set of information. + // The word corresponding to this set of information. string word = 3; + + // A distinct integer value is assigned for every speaker within + // the audio. This field specifies which one of those speakers was detected to + // have spoken this word. Value ranges from '1' to diarization_speaker_count. + // speaker_tag is set if enable_speaker_diarization = 'true' and only in the + // top alternative. 
+ int32 speaker_tag = 5 + [(google.api.field_behavior) = OUTPUT_ONLY]; } diff --git a/speech/google/cloud/speech_v1/proto/cloud_speech_pb2.py b/speech/google/cloud/speech_v1/proto/cloud_speech_pb2.py index e2667403c18c..375bc8a1fd89 100644 --- a/speech/google/cloud/speech_v1/proto/cloud_speech_pb2.py +++ b/speech/google/cloud/speech_v1/proto/cloud_speech_pb2.py @@ -16,9 +16,12 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.longrunning import ( operations_pb2 as google_dot_longrunning_dot_operations__pb2, ) +from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2 @@ -29,14 +32,17 @@ package="google.cloud.speech.v1", syntax="proto3", serialized_options=_b( - "\n\032com.google.cloud.speech.v1B\013SpeechProtoP\001Z\n\x0fspeech_contexts\x18\x06 \x03(\x0b\x32%.google.cloud.speech.v1.SpeechContext\x12 \n\x18\x65nable_word_time_offsets\x18\x08 \x01(\x08\x12$\n\x1c\x65nable_automatic_punctuation\x18\x0b \x01(\x08\x12=\n\x08metadata\x18\t \x01(\x0b\x32+.google.cloud.speech.v1.RecognitionMetadata\x12\r\n\x05model\x18\r \x01(\t\x12\x14\n\x0cuse_enhanced\x18\x0e \x01(\x08"\x8b\x01\n\rAudioEncoding\x12\x18\n\x14\x45NCODING_UNSPECIFIED\x10\x00\x12\x0c\n\x08LINEAR16\x10\x01\x12\x08\n\x04\x46LAC\x10\x02\x12\t\n\x05MULAW\x10\x03\x12\x07\n\x03\x41MR\x10\x04\x12\n\n\x06\x41MR_WB\x10\x05\x12\x0c\n\x08OGG_OPUS\x10\x06\x12\x1a\n\x16SPEEX_WITH_HEADER_BYTE\x10\x07"\xa0\x08\n\x13RecognitionMetadata\x12U\n\x10interaction_type\x18\x01 
\x01(\x0e\x32;.google.cloud.speech.v1.RecognitionMetadata.InteractionType\x12$\n\x1cindustry_naics_code_of_audio\x18\x03 \x01(\r\x12[\n\x13microphone_distance\x18\x04 \x01(\x0e\x32>.google.cloud.speech.v1.RecognitionMetadata.MicrophoneDistance\x12Z\n\x13original_media_type\x18\x05 \x01(\x0e\x32=.google.cloud.speech.v1.RecognitionMetadata.OriginalMediaType\x12^\n\x15recording_device_type\x18\x06 \x01(\x0e\x32?.google.cloud.speech.v1.RecognitionMetadata.RecordingDeviceType\x12\x1d\n\x15recording_device_name\x18\x07 \x01(\t\x12\x1a\n\x12original_mime_type\x18\x08 \x01(\t\x12\x13\n\x0b\x61udio_topic\x18\n \x01(\t"\xc5\x01\n\x0fInteractionType\x12 \n\x1cINTERACTION_TYPE_UNSPECIFIED\x10\x00\x12\x0e\n\nDISCUSSION\x10\x01\x12\x10\n\x0cPRESENTATION\x10\x02\x12\x0e\n\nPHONE_CALL\x10\x03\x12\r\n\tVOICEMAIL\x10\x04\x12\x1b\n\x17PROFESSIONALLY_PRODUCED\x10\x05\x12\x10\n\x0cVOICE_SEARCH\x10\x06\x12\x11\n\rVOICE_COMMAND\x10\x07\x12\r\n\tDICTATION\x10\x08"d\n\x12MicrophoneDistance\x12#\n\x1fMICROPHONE_DISTANCE_UNSPECIFIED\x10\x00\x12\r\n\tNEARFIELD\x10\x01\x12\x0c\n\x08MIDFIELD\x10\x02\x12\x0c\n\x08\x46\x41RFIELD\x10\x03"N\n\x11OriginalMediaType\x12#\n\x1fORIGINAL_MEDIA_TYPE_UNSPECIFIED\x10\x00\x12\t\n\x05\x41UDIO\x10\x01\x12\t\n\x05VIDEO\x10\x02"\xa4\x01\n\x13RecordingDeviceType\x12%\n!RECORDING_DEVICE_TYPE_UNSPECIFIED\x10\x00\x12\x0e\n\nSMARTPHONE\x10\x01\x12\x06\n\x02PC\x10\x02\x12\x0e\n\nPHONE_LINE\x10\x03\x12\x0b\n\x07VEHICLE\x10\x04\x12\x18\n\x14OTHER_OUTDOOR_DEVICE\x10\x05\x12\x17\n\x13OTHER_INDOOR_DEVICE\x10\x06" \n\rSpeechContext\x12\x0f\n\x07phrases\x18\x01 \x03(\t"D\n\x10RecognitionAudio\x12\x11\n\x07\x63ontent\x18\x01 \x01(\x0cH\x00\x12\r\n\x03uri\x18\x02 \x01(\tH\x00\x42\x0e\n\x0c\x61udio_source"U\n\x11RecognizeResponse\x12@\n\x07results\x18\x02 \x03(\x0b\x32/.google.cloud.speech.v1.SpeechRecognitionResult"`\n\x1cLongRunningRecognizeResponse\x12@\n\x07results\x18\x02 
\x03(\x0b\x32/.google.cloud.speech.v1.SpeechRecognitionResult"\x9e\x01\n\x1cLongRunningRecognizeMetadata\x12\x18\n\x10progress_percent\x18\x01 \x01(\x05\x12.\n\nstart_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x34\n\x10last_update_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"\xb1\x02\n\x1aStreamingRecognizeResponse\x12!\n\x05\x65rror\x18\x01 \x01(\x0b\x32\x12.google.rpc.Status\x12\x43\n\x07results\x18\x02 \x03(\x0b\x32\x32.google.cloud.speech.v1.StreamingRecognitionResult\x12]\n\x11speech_event_type\x18\x04 \x01(\x0e\x32\x42.google.cloud.speech.v1.StreamingRecognizeResponse.SpeechEventType"L\n\x0fSpeechEventType\x12\x1c\n\x18SPEECH_EVENT_UNSPECIFIED\x10\x00\x12\x1b\n\x17\x45ND_OF_SINGLE_UTTERANCE\x10\x01"\xed\x01\n\x1aStreamingRecognitionResult\x12J\n\x0c\x61lternatives\x18\x01 \x03(\x0b\x32\x34.google.cloud.speech.v1.SpeechRecognitionAlternative\x12\x10\n\x08is_final\x18\x02 \x01(\x08\x12\x11\n\tstability\x18\x03 \x01(\x02\x12\x32\n\x0fresult_end_time\x18\x04 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x13\n\x0b\x63hannel_tag\x18\x05 \x01(\x05\x12\x15\n\rlanguage_code\x18\x06 \x01(\t"z\n\x17SpeechRecognitionResult\x12J\n\x0c\x61lternatives\x18\x01 \x03(\x0b\x32\x34.google.cloud.speech.v1.SpeechRecognitionAlternative\x12\x13\n\x0b\x63hannel_tag\x18\x02 \x01(\x05"w\n\x1cSpeechRecognitionAlternative\x12\x12\n\ntranscript\x18\x01 \x01(\t\x12\x12\n\nconfidence\x18\x02 \x01(\x02\x12/\n\x05words\x18\x03 \x03(\x0b\x32 .google.cloud.speech.v1.WordInfo"t\n\x08WordInfo\x12-\n\nstart_time\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\x12+\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x0c\n\x04word\x18\x03 
\x01(\t2\xa9\x03\n\x06Speech\x12\x81\x01\n\tRecognize\x12(.google.cloud.speech.v1.RecognizeRequest\x1a).google.cloud.speech.v1.RecognizeResponse"\x1f\x82\xd3\xe4\x93\x02\x19"\x14/v1/speech:recognize:\x01*\x12\x96\x01\n\x14LongRunningRecognize\x12\x33.google.cloud.speech.v1.LongRunningRecognizeRequest\x1a\x1d.google.longrunning.Operation"*\x82\xd3\xe4\x93\x02$"\x1f/v1/speech:longrunningrecognize:\x01*\x12\x81\x01\n\x12StreamingRecognize\x12\x31.google.cloud.speech.v1.StreamingRecognizeRequest\x1a\x32.google.cloud.speech.v1.StreamingRecognizeResponse"\x00(\x01\x30\x01\x42l\n\x1a\x63om.google.cloud.speech.v1B\x0bSpeechProtoP\x01Z\n\x06\x63onfig\x18\x01 \x01(\x0b\x32).google.cloud.speech.v1.RecognitionConfigB\x03\xe0\x41\x02\x12<\n\x05\x61udio\x18\x02 \x01(\x0b\x32(.google.cloud.speech.v1.RecognitionAudioB\x03\xe0\x41\x02"\x9b\x01\n\x1bLongRunningRecognizeRequest\x12>\n\x06\x63onfig\x18\x01 \x01(\x0b\x32).google.cloud.speech.v1.RecognitionConfigB\x03\xe0\x41\x02\x12<\n\x05\x61udio\x18\x02 \x01(\x0b\x32(.google.cloud.speech.v1.RecognitionAudioB\x03\xe0\x41\x02"\x99\x01\n\x19StreamingRecognizeRequest\x12N\n\x10streaming_config\x18\x01 \x01(\x0b\x32\x32.google.cloud.speech.v1.StreamingRecognitionConfigH\x00\x12\x17\n\raudio_content\x18\x02 \x01(\x0cH\x00\x42\x13\n\x11streaming_request"\x8f\x01\n\x1aStreamingRecognitionConfig\x12>\n\x06\x63onfig\x18\x01 \x01(\x0b\x32).google.cloud.speech.v1.RecognitionConfigB\x03\xe0\x41\x02\x12\x18\n\x10single_utterance\x18\x02 \x01(\x08\x12\x17\n\x0finterim_results\x18\x03 \x01(\x08"\xdf\x05\n\x11RecognitionConfig\x12I\n\x08\x65ncoding\x18\x01 \x01(\x0e\x32\x37.google.cloud.speech.v1.RecognitionConfig.AudioEncoding\x12\x19\n\x11sample_rate_hertz\x18\x02 \x01(\x05\x12\x1b\n\x13\x61udio_channel_count\x18\x07 \x01(\x05\x12/\n\'enable_separate_recognition_per_channel\x18\x0c \x01(\x08\x12\x1a\n\rlanguage_code\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x18\n\x10max_alternatives\x18\x04 \x01(\x05\x12\x18\n\x10profanity_filter\x18\x05 
\x01(\x08\x12>\n\x0fspeech_contexts\x18\x06 \x03(\x0b\x32%.google.cloud.speech.v1.SpeechContext\x12 \n\x18\x65nable_word_time_offsets\x18\x08 \x01(\x08\x12$\n\x1c\x65nable_automatic_punctuation\x18\x0b \x01(\x08\x12L\n\x12\x64iarization_config\x18\x13 \x01(\x0b\x32\x30.google.cloud.speech.v1.SpeakerDiarizationConfig\x12=\n\x08metadata\x18\t \x01(\x0b\x32+.google.cloud.speech.v1.RecognitionMetadata\x12\r\n\x05model\x18\r \x01(\t\x12\x14\n\x0cuse_enhanced\x18\x0e \x01(\x08"\x8b\x01\n\rAudioEncoding\x12\x18\n\x14\x45NCODING_UNSPECIFIED\x10\x00\x12\x0c\n\x08LINEAR16\x10\x01\x12\x08\n\x04\x46LAC\x10\x02\x12\t\n\x05MULAW\x10\x03\x12\x07\n\x03\x41MR\x10\x04\x12\n\n\x06\x41MR_WB\x10\x05\x12\x0c\n\x08OGG_OPUS\x10\x06\x12\x1a\n\x16SPEEX_WITH_HEADER_BYTE\x10\x07"t\n\x18SpeakerDiarizationConfig\x12"\n\x1a\x65nable_speaker_diarization\x18\x01 \x01(\x08\x12\x19\n\x11min_speaker_count\x18\x02 \x01(\x05\x12\x19\n\x11max_speaker_count\x18\x03 \x01(\x05"\xa0\x08\n\x13RecognitionMetadata\x12U\n\x10interaction_type\x18\x01 \x01(\x0e\x32;.google.cloud.speech.v1.RecognitionMetadata.InteractionType\x12$\n\x1cindustry_naics_code_of_audio\x18\x03 \x01(\r\x12[\n\x13microphone_distance\x18\x04 \x01(\x0e\x32>.google.cloud.speech.v1.RecognitionMetadata.MicrophoneDistance\x12Z\n\x13original_media_type\x18\x05 \x01(\x0e\x32=.google.cloud.speech.v1.RecognitionMetadata.OriginalMediaType\x12^\n\x15recording_device_type\x18\x06 \x01(\x0e\x32?.google.cloud.speech.v1.RecognitionMetadata.RecordingDeviceType\x12\x1d\n\x15recording_device_name\x18\x07 \x01(\t\x12\x1a\n\x12original_mime_type\x18\x08 \x01(\t\x12\x13\n\x0b\x61udio_topic\x18\n \x01(\t"\xc5\x01\n\x0fInteractionType\x12 
\n\x1cINTERACTION_TYPE_UNSPECIFIED\x10\x00\x12\x0e\n\nDISCUSSION\x10\x01\x12\x10\n\x0cPRESENTATION\x10\x02\x12\x0e\n\nPHONE_CALL\x10\x03\x12\r\n\tVOICEMAIL\x10\x04\x12\x1b\n\x17PROFESSIONALLY_PRODUCED\x10\x05\x12\x10\n\x0cVOICE_SEARCH\x10\x06\x12\x11\n\rVOICE_COMMAND\x10\x07\x12\r\n\tDICTATION\x10\x08"d\n\x12MicrophoneDistance\x12#\n\x1fMICROPHONE_DISTANCE_UNSPECIFIED\x10\x00\x12\r\n\tNEARFIELD\x10\x01\x12\x0c\n\x08MIDFIELD\x10\x02\x12\x0c\n\x08\x46\x41RFIELD\x10\x03"N\n\x11OriginalMediaType\x12#\n\x1fORIGINAL_MEDIA_TYPE_UNSPECIFIED\x10\x00\x12\t\n\x05\x41UDIO\x10\x01\x12\t\n\x05VIDEO\x10\x02"\xa4\x01\n\x13RecordingDeviceType\x12%\n!RECORDING_DEVICE_TYPE_UNSPECIFIED\x10\x00\x12\x0e\n\nSMARTPHONE\x10\x01\x12\x06\n\x02PC\x10\x02\x12\x0e\n\nPHONE_LINE\x10\x03\x12\x0b\n\x07VEHICLE\x10\x04\x12\x18\n\x14OTHER_OUTDOOR_DEVICE\x10\x05\x12\x17\n\x13OTHER_INDOOR_DEVICE\x10\x06" \n\rSpeechContext\x12\x0f\n\x07phrases\x18\x01 \x03(\t"D\n\x10RecognitionAudio\x12\x11\n\x07\x63ontent\x18\x01 \x01(\x0cH\x00\x12\r\n\x03uri\x18\x02 \x01(\tH\x00\x42\x0e\n\x0c\x61udio_source"U\n\x11RecognizeResponse\x12@\n\x07results\x18\x02 \x03(\x0b\x32/.google.cloud.speech.v1.SpeechRecognitionResult"`\n\x1cLongRunningRecognizeResponse\x12@\n\x07results\x18\x02 \x03(\x0b\x32/.google.cloud.speech.v1.SpeechRecognitionResult"\x9e\x01\n\x1cLongRunningRecognizeMetadata\x12\x18\n\x10progress_percent\x18\x01 \x01(\x05\x12.\n\nstart_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x34\n\x10last_update_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"\xb1\x02\n\x1aStreamingRecognizeResponse\x12!\n\x05\x65rror\x18\x01 \x01(\x0b\x32\x12.google.rpc.Status\x12\x43\n\x07results\x18\x02 \x03(\x0b\x32\x32.google.cloud.speech.v1.StreamingRecognitionResult\x12]\n\x11speech_event_type\x18\x04 
\x01(\x0e\x32\x42.google.cloud.speech.v1.StreamingRecognizeResponse.SpeechEventType"L\n\x0fSpeechEventType\x12\x1c\n\x18SPEECH_EVENT_UNSPECIFIED\x10\x00\x12\x1b\n\x17\x45ND_OF_SINGLE_UTTERANCE\x10\x01"\xf2\x01\n\x1aStreamingRecognitionResult\x12J\n\x0c\x61lternatives\x18\x01 \x03(\x0b\x32\x34.google.cloud.speech.v1.SpeechRecognitionAlternative\x12\x10\n\x08is_final\x18\x02 \x01(\x08\x12\x11\n\tstability\x18\x03 \x01(\x02\x12\x32\n\x0fresult_end_time\x18\x04 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x13\n\x0b\x63hannel_tag\x18\x05 \x01(\x05\x12\x1a\n\rlanguage_code\x18\x06 \x01(\tB\x03\xe0\x41\x03"z\n\x17SpeechRecognitionResult\x12J\n\x0c\x61lternatives\x18\x01 \x03(\x0b\x32\x34.google.cloud.speech.v1.SpeechRecognitionAlternative\x12\x13\n\x0b\x63hannel_tag\x18\x02 \x01(\x05"w\n\x1cSpeechRecognitionAlternative\x12\x12\n\ntranscript\x18\x01 \x01(\t\x12\x12\n\nconfidence\x18\x02 \x01(\x02\x12/\n\x05words\x18\x03 \x03(\x0b\x32 .google.cloud.speech.v1.WordInfo"\x8e\x01\n\x08WordInfo\x12-\n\nstart_time\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\x12+\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x0c\n\x04word\x18\x03 \x01(\t\x12\x18\n\x0bspeaker_tag\x18\x05 
\x01(\x05\x42\x03\xe0\x41\x03\x32\xd1\x04\n\x06Speech\x12\x90\x01\n\tRecognize\x12(.google.cloud.speech.v1.RecognizeRequest\x1a).google.cloud.speech.v1.RecognizeResponse".\x82\xd3\xe4\x93\x02\x19"\x14/v1/speech:recognize:\x01*\xda\x41\x0c\x63onfig,audio\x12\xe4\x01\n\x14LongRunningRecognize\x12\x33.google.cloud.speech.v1.LongRunningRecognizeRequest\x1a\x1d.google.longrunning.Operation"x\x82\xd3\xe4\x93\x02$"\x1f/v1/speech:longrunningrecognize:\x01*\xda\x41\x0c\x63onfig,audio\xca\x41<\n\x1cLongRunningRecognizeResponse\x12\x1cLongRunningRecognizeMetadata\x12\x81\x01\n\x12StreamingRecognize\x12\x31.google.cloud.speech.v1.StreamingRecognizeRequest\x1a\x32.google.cloud.speech.v1.StreamingRecognizeResponse"\x00(\x01\x30\x01\x1aI\xca\x41\x15speech.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformBr\n\x1a\x63om.google.cloud.speech.v1B\x0bSpeechProtoP\x01Z`__. + fields, proto buffers use a pure binary representation (not + base64). See `content limits `__. """, # @@protoc_insertion_point(class_scope:google.cloud.speech.v1.StreamingRecognizeRequest) ), @@ -1837,14 +1958,14 @@ Attributes: config: - *Required* Provides information to the recognizer that + Required. Provides information to the recognizer that specifies how to process the request. single_utterance: - *Optional* If ``false`` or omitted, the recognizer will - perform continuous recognition (continuing to wait for and - process audio even if the user pauses speaking) until the - client closes the input stream (gRPC API) or until the maximum - time limit has been reached. May return multiple + If ``false`` or omitted, the recognizer will perform + continuous recognition (continuing to wait for and process + audio even if the user pauses speaking) until the client + closes the input stream (gRPC API) or until the maximum time + limit has been reached. May return multiple ``StreamingRecognitionResult``\ s with the ``is_final`` flag set to ``true``. 
If ``true``, the recognizer will detect a single spoken utterance. When it detects that the user has @@ -1853,11 +1974,10 @@ will return no more than one ``StreamingRecognitionResult`` with the ``is_final`` flag set to ``true``. interim_results: - *Optional* If ``true``, interim results (tentative hypotheses) - may be returned as they become available (these interim - results are indicated with the ``is_final=false`` flag). If - ``false`` or omitted, only ``is_final=true`` result(s) are - returned. + If ``true``, interim results (tentative hypotheses) may be + returned as they become available (these interim results are + indicated with the ``is_final=false`` flag). If ``false`` or + omitted, only ``is_final=true`` result(s) are returned. """, # @@protoc_insertion_point(class_scope:google.cloud.speech.v1.StreamingRecognitionConfig) ), @@ -1887,15 +2007,15 @@ 16000 is optimal. For best results, set the sampling rate of the audio source to 16000 Hz. If that's not possible, use the native sample rate of the audio source (instead of re- - sampling). This field is optional for ``FLAC`` and ``WAV`` - audio files and required for all other audio formats. For + sampling). This field is optional for FLAC and WAV audio + files, but is required for all other audio formats. For details, see [AudioEncoding][google.cloud.speech.v1.Recognitio nConfig.AudioEncoding]. audio_channel_count: - *Optional* The number of channels in the input audio data. - ONLY set this for MULTI-CHANNEL recognition. Valid values for - LINEAR16 and FLAC are ``1``-``8``. Valid values for OGG\_OPUS - are '1'-'254'. Valid value for MULAW, AMR, AMR\_WB and + The number of channels in the input audio data. ONLY set this + for MULTI-CHANNEL recognition. Valid values for LINEAR16 and + FLAC are ``1``-``8``. Valid values for OGG\_OPUS are + '1'-'254'. Valid value for MULAW, AMR, AMR\_WB and SPEEX\_WITH\_HEADER\_BYTE is only ``1``. If ``0`` or omitted, defaults to one channel (mono). 
Note: We only recognize the first channel by default. To perform independent recognition @@ -1911,55 +2031,64 @@ channels recognized: ``audio_channel_count`` multiplied by the length of the audio. language_code: - *Required* The language of the supplied audio as a `BCP-47 + Required. The language of the supplied audio as a `BCP-47 `__ language - tag. Example: "en-US". See `Language Support `__ for a list of the currently supported - language codes. + tag. Example: "en-US". See `Language Support + `__ + for a list of the currently supported language codes. max_alternatives: - *Optional* Maximum number of recognition hypotheses to be - returned. Specifically, the maximum number of + Maximum number of recognition hypotheses to be returned. + Specifically, the maximum number of ``SpeechRecognitionAlternative`` messages within each ``SpeechRecognitionResult``. The server may return fewer than ``max_alternatives``. Valid values are ``0``-``30``. A value of ``0`` or ``1`` will return a maximum of one. If omitted, will return a maximum of one. profanity_filter: - *Optional* If set to ``true``, the server will attempt to - filter out profanities, replacing all but the initial - character in each filtered word with asterisks, e.g. - "f\*\*\*". If set to ``false`` or omitted, profanities won't - be filtered out. + If set to ``true``, the server will attempt to filter out + profanities, replacing all but the initial character in each + filtered word with asterisks, e.g. "f\*\*\*". If set to + ``false`` or omitted, profanities won't be filtered out. speech_contexts: - *Optional* array of + Array of [SpeechContext][google.cloud.speech.v1.SpeechContext]. A means to provide context to assist the speech recognition. For more - information, see `Phrase Hints `__. + information, see `speech adaptation + `__. enable_word_time_offsets: - *Optional* If ``true``, the top result includes a list of - words and the start and end time offsets (timestamps) for - those words. 
If ``false``, no word-level time offset - information is returned. The default is ``false``. + If ``true``, the top result includes a list of words and the + start and end time offsets (timestamps) for those words. If + ``false``, no word-level time offset information is returned. + The default is ``false``. enable_automatic_punctuation: - *Optional* If 'true', adds punctuation to recognition result - hypotheses. This feature is only available in select - languages. Setting this for requests in other languages has no - effect at all. The default 'false' value does not add - punctuation to result hypotheses. Note: This is currently - offered as an experimental service, complimentary to all - users. In the future this may be exclusively available as a - premium feature. + If 'true', adds punctuation to recognition result hypotheses. + This feature is only available in select languages. Setting + this for requests in other languages has no effect at all. The + default 'false' value does not add punctuation to result + hypotheses. Note: This is currently offered as an experimental + service, complimentary to all users. In the future this may be + exclusively available as a premium feature. + diarization_config: + Config to enable speaker diarization and set additional + parameters to make diarization better suited for your + application. Note: When this is enabled, we send all the words + from the beginning of the audio for the top alternative in + every consecutive STREAMING responses. This is done in order + to improve our speaker tags as our models learn to identify + the speakers in the conversation over time. For non-streaming + requests, the diarization results will be provided only in the + top alternative of the FINAL SpeechRecognitionResult. metadata: - *Optional* Metadata regarding this request. + Metadata regarding this request. model: - *Optional* Which model to select for the given request. Select - the model best suited to your domain to get best results. 
If a - model is not explicitly specified, then we auto-select a model - based on the parameters in the RecognitionConfig. .. raw:: - html .. raw:: html :: - .. - raw:: html .. raw:: html :: + Which model to select for the given request. Select the model + best suited to your domain to get best results. If a model is + not explicitly specified, then we auto-select a model based on + the parameters in the RecognitionConfig. .. raw:: html +
Model Description
.. raw:: html :: + .. raw:: html .. raw:: + html :: .. raw:: html .. raw:: html :: @@ -1978,19 +2107,49 @@ recorded at a 16khz or greater sampling rate. .. raw:: html .. raw:: html
ModelDescription
command_and_search Best for short queries such as voice commands or voice search.
use_enhanced: - *Optional* Set to true to use an enhanced model for speech - recognition. If ``use_enhanced`` is set to true and the - ``model`` field is not set, then an appropriate enhanced model - is chosen if an enhanced model exists for the audio. If - ``use_enhanced`` is true and an enhanced version of the - specified model does not exist, then the speech is recognized - using the standard version of the specified model. + Set to true to use an enhanced model for speech recognition. + If ``use_enhanced`` is set to true and the ``model`` field is + not set, then an appropriate enhanced model is chosen if an + enhanced model exists for the audio. If ``use_enhanced`` is + true and an enhanced version of the specified model does not + exist, then the speech is recognized using the standard + version of the specified model. """, # @@protoc_insertion_point(class_scope:google.cloud.speech.v1.RecognitionConfig) ), ) _sym_db.RegisterMessage(RecognitionConfig) +SpeakerDiarizationConfig = _reflection.GeneratedProtocolMessageType( + "SpeakerDiarizationConfig", + (_message.Message,), + dict( + DESCRIPTOR=_SPEAKERDIARIZATIONCONFIG, + __module__="google.cloud.speech_v1.proto.cloud_speech_pb2", + __doc__="""Config to enable speaker diarization. + + + Attributes: + enable_speaker_diarization: + If 'true', enables speaker detection for each recognized word + in the top alternative of the recognition result using a + speaker\_tag provided in the WordInfo. + min_speaker_count: + Minimum number of speakers in the conversation. This range + gives you more flexibility by allowing the system to + automatically determine the correct number of speakers. If not + set, the default value is 2. + max_speaker_count: + Maximum number of speakers in the conversation. This range + gives you more flexibility by allowing the system to + automatically determine the correct number of speakers. If not + set, the default value is 6. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.speech.v1.SpeakerDiarizationConfig) + ), +) +_sym_db.RegisterMessage(SpeakerDiarizationConfig) + RecognitionMetadata = _reflection.GeneratedProtocolMessageType( "RecognitionMetadata", (_message.Message,), @@ -2047,13 +2206,18 @@ Attributes: phrases: - *Optional* A list of strings containing words and phrases - "hints" so that the speech recognition is more likely to - recognize them. This can be used to improve the accuracy for - specific words and phrases, for example, if specific commands - are typically spoken by the user. This can also be used to add - additional words to the vocabulary of the recognizer. See - `usage limits `__. + A list of strings containing words and phrases "hints" so that + the speech recognition is more likely to recognize them. This + can be used to improve the accuracy for specific words and + phrases, for example, if specific commands are typically + spoken by the user. This can also be used to add additional + words to the vocabulary of the recognizer. See `usage limits + `__. + List items can also be set to classes for groups of words that + represent common concepts that occur in natural language. For + example, rather than providing phrase hints for every month of + the year, using the $MONTH class improves the likelihood of + correctly transcribing audio that includes months. """, # @@protoc_insertion_point(class_scope:google.cloud.speech.v1.SpeechContext) ), @@ -2070,7 +2234,8 @@ ``RecognitionConfig``. Either ``content`` or ``uri`` must be supplied. Supplying both or neither returns [google.rpc.Code.INVALID\_ARGUMENT][google.rpc.Code.INVALID\_ARGUMENT]. - See `content limits `__. + See `content + limits `__. Attributes: @@ -2079,8 +2244,8 @@ Cloud Storage uri. content: The audio data bytes encoded as specified in - ``RecognitionConfig``. Note: as with all bytes fields, - protobuffers use a pure binary representation, whereas JSON + ``RecognitionConfig``. 
Note: as with all bytes fields, proto + buffers use a pure binary representation, whereas JSON representations use base64. uri: URI that points to a file that contains audio data bytes as @@ -2111,8 +2276,8 @@ Attributes: results: - Output only. Sequential list of transcription results - corresponding to sequential portions of audio. + Sequential list of transcription results corresponding to + sequential portions of audio. """, # @@protoc_insertion_point(class_scope:google.cloud.speech.v1.RecognizeResponse) ), @@ -2135,8 +2300,8 @@ Attributes: results: - Output only. Sequential list of transcription results - corresponding to sequential portions of audio. + Sequential list of transcription results corresponding to + sequential portions of audio. """, # @@protoc_insertion_point(class_scope:google.cloud.speech.v1.LongRunningRecognizeResponse) ), @@ -2229,17 +2394,16 @@ Attributes: error: - Output only. If set, returns a - [google.rpc.Status][google.rpc.Status] message that specifies - the error for the operation. + If set, returns a [google.rpc.Status][google.rpc.Status] + message that specifies the error for the operation. results: - Output only. This repeated list contains zero or more results - that correspond to consecutive portions of the audio currently + This repeated list contains zero or more results that + correspond to consecutive portions of the audio currently being processed. It contains zero or one ``is_final=true`` result (the newly settled portion), followed by zero or more ``is_final=false`` results (the interim results). speech_event_type: - Output only. Indicates the type of speech event. + Indicates the type of speech event. """, # @@protoc_insertion_point(class_scope:google.cloud.speech.v1.StreamingRecognizeResponse) ), @@ -2258,38 +2422,38 @@ Attributes: alternatives: - Output only. May contain one or more recognition hypotheses - (up to the maximum specified in ``max_alternatives``). 
These - alternatives are ordered in terms of accuracy, with the top - (first) alternative being the most probable, as ranked by the + May contain one or more recognition hypotheses (up to the + maximum specified in ``max_alternatives``). These alternatives + are ordered in terms of accuracy, with the top (first) + alternative being the most probable, as ranked by the recognizer. is_final: - Output only. If ``false``, this ``StreamingRecognitionResult`` - represents an interim result that may change. If ``true``, - this is the final time the speech service will return this - particular ``StreamingRecognitionResult``, the recognizer will - not return any further hypotheses for this portion of the - transcript and corresponding audio. + If ``false``, this ``StreamingRecognitionResult`` represents + an interim result that may change. If ``true``, this is the + final time the speech service will return this particular + ``StreamingRecognitionResult``, the recognizer will not return + any further hypotheses for this portion of the transcript and + corresponding audio. stability: - Output only. An estimate of the likelihood that the recognizer - will not change its guess about this interim result. Values - range from 0.0 (completely unstable) to 1.0 (completely - stable). This field is only provided for interim results + An estimate of the likelihood that the recognizer will not + change its guess about this interim result. Values range from + 0.0 (completely unstable) to 1.0 (completely stable). This + field is only provided for interim results (``is_final=false``). The default of 0.0 is a sentinel value indicating ``stability`` was not set. result_end_time: - Output only. Time offset of the end of this result relative to - the beginning of the audio. + Time offset of the end of this result relative to the + beginning of the audio. channel_tag: For multi-channel audio, this is the channel number corresponding to the recognized result for the audio from that channel. 
For audio\_channel\_count = N, its output values can range from '1' to 'N'. language_code: - Output only. The `BCP-47 `__ language tag of the language - in this result. This language code was detected to have the - most likelihood of being spoken in the audio. + The `BCP-47 `__ + language tag of the language in this result. This language + code was detected to have the most likelihood of being spoken + in the audio. """, # @@protoc_insertion_point(class_scope:google.cloud.speech.v1.StreamingRecognitionResult) ), @@ -2307,10 +2471,10 @@ Attributes: alternatives: - Output only. May contain one or more recognition hypotheses - (up to the maximum specified in ``max_alternatives``). These - alternatives are ordered in terms of accuracy, with the top - (first) alternative being the most probable, as ranked by the + May contain one or more recognition hypotheses (up to the + maximum specified in ``max_alternatives``). These alternatives + are ordered in terms of accuracy, with the top (first) + alternative being the most probable, as ranked by the recognizer. channel_tag: For multi-channel audio, this is the channel number @@ -2334,22 +2498,20 @@ Attributes: transcript: - Output only. Transcript text representing the words that the - user spoke. + Transcript text representing the words that the user spoke. confidence: - Output only. The confidence estimate between 0.0 and 1.0. A - higher number indicates an estimated greater likelihood that - the recognized words are correct. This field is set only for - the top alternative of a non-streaming result or, of a - streaming result where ``is_final=true``. This field is not - guaranteed to be accurate and users should not rely on it to - be always provided. The default of 0.0 is a sentinel value - indicating ``confidence`` was not set. + The confidence estimate between 0.0 and 1.0. A higher number + indicates an estimated greater likelihood that the recognized + words are correct. 
This field is set only for the top + alternative of a non-streaming result or, of a streaming + result where ``is_final=true``. This field is not guaranteed + to be accurate and users should not rely on it to be always + provided. The default of 0.0 is a sentinel value indicating + ``confidence`` was not set. words: - Output only. A list of word-specific information for each - recognized word. Note: When ``enable_speaker_diarization`` is - true, you will see all the words from the beginning of the - audio. + A list of word-specific information for each recognized word. + Note: When ``enable_speaker_diarization`` is true, you will + see all the words from the beginning of the audio. """, # @@protoc_insertion_point(class_scope:google.cloud.speech.v1.SpeechRecognitionAlternative) ), @@ -2367,20 +2529,26 @@ Attributes: start_time: - Output only. Time offset relative to the beginning of the - audio, and corresponding to the start of the spoken word. This - field is only set if ``enable_word_time_offsets=true`` and - only in the top hypothesis. This is an experimental feature - and the accuracy of the time offset can vary. + Time offset relative to the beginning of the audio, and + corresponding to the start of the spoken word. This field is + only set if ``enable_word_time_offsets=true`` and only in the + top hypothesis. This is an experimental feature and the + accuracy of the time offset can vary. end_time: - Output only. Time offset relative to the beginning of the - audio, and corresponding to the end of the spoken word. This - field is only set if ``enable_word_time_offsets=true`` and - only in the top hypothesis. This is an experimental feature - and the accuracy of the time offset can vary. + Time offset relative to the beginning of the audio, and + corresponding to the end of the spoken word. This field is + only set if ``enable_word_time_offsets=true`` and only in the + top hypothesis. 
This is an experimental feature and the + accuracy of the time offset can vary. word: - Output only. The word corresponding to this set of - information. + The word corresponding to this set of information. + speaker_tag: + A distinct integer value is assigned for every speaker within + the audio. This field specifies which one of those speakers + was detected to have spoken this word. Value ranges from '1' + to diarization\_speaker\_count. speaker\_tag is set if + enable\_speaker\_diarization = 'true' and only in the top + alternative. """, # @@protoc_insertion_point(class_scope:google.cloud.speech.v1.WordInfo) ), @@ -2389,15 +2557,25 @@ DESCRIPTOR._options = None +_RECOGNIZEREQUEST.fields_by_name["config"]._options = None +_RECOGNIZEREQUEST.fields_by_name["audio"]._options = None +_LONGRUNNINGRECOGNIZEREQUEST.fields_by_name["config"]._options = None +_LONGRUNNINGRECOGNIZEREQUEST.fields_by_name["audio"]._options = None +_STREAMINGRECOGNITIONCONFIG.fields_by_name["config"]._options = None +_RECOGNITIONCONFIG.fields_by_name["language_code"]._options = None +_STREAMINGRECOGNITIONRESULT.fields_by_name["language_code"]._options = None +_WORDINFO.fields_by_name["speaker_tag"]._options = None _SPEECH = _descriptor.ServiceDescriptor( name="Speech", full_name="google.cloud.speech.v1.Speech", file=DESCRIPTOR, index=0, - serialized_options=None, - serialized_start=3890, - serialized_end=4315, + serialized_options=_b( + "\312A\025speech.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" + ), + serialized_start=4233, + serialized_end=4826, methods=[ _descriptor.MethodDescriptor( name="Recognize", @@ -2407,7 +2585,7 @@ input_type=_RECOGNIZEREQUEST, output_type=_RECOGNIZERESPONSE, serialized_options=_b( - '\202\323\344\223\002\031"\024/v1/speech:recognize:\001*' + '\202\323\344\223\002\031"\024/v1/speech:recognize:\001*\332A\014config,audio' ), ), _descriptor.MethodDescriptor( @@ -2418,7 +2596,7 @@ input_type=_LONGRUNNINGRECOGNIZEREQUEST, 
output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, serialized_options=_b( - '\202\323\344\223\002$"\037/v1/speech:longrunningrecognize:\001*' + '\202\323\344\223\002$"\037/v1/speech:longrunningrecognize:\001*\332A\014config,audio\312A<\n\034LongRunningRecognizeResponse\022\034LongRunningRecognizeMetadata' ), ), _descriptor.MethodDescriptor( diff --git a/speech/google/cloud/speech_v1/proto/cloud_speech_pb2_grpc.py b/speech/google/cloud/speech_v1/proto/cloud_speech_pb2_grpc.py index 1928cdb085d6..a7ddcb9c8865 100644 --- a/speech/google/cloud/speech_v1/proto/cloud_speech_pb2_grpc.py +++ b/speech/google/cloud/speech_v1/proto/cloud_speech_pb2_grpc.py @@ -53,6 +53,8 @@ def LongRunningRecognize(self, request, context): google.longrunning.Operations interface. Returns either an `Operation.error` or an `Operation.response` which contains a `LongRunningRecognizeResponse` message. + For more information on asynchronous speech recognition, see the + [how-to](https://cloud.google.com/speech-to-text/docs/async-recognize). """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") diff --git a/speech/google/cloud/speech_v1p1beta1/gapic/enums.py b/speech/google/cloud/speech_v1p1beta1/gapic/enums.py index 4af44fa47fe3..3b7882080c9f 100644 --- a/speech/google/cloud/speech_v1p1beta1/gapic/enums.py +++ b/speech/google/cloud/speech_v1p1beta1/gapic/enums.py @@ -24,14 +24,16 @@ class AudioEncoding(enum.IntEnum): """ The encoding of the audio data sent in the request. - All encodings support only 1 channel (mono) audio. + All encodings support only 1 channel (mono) audio, unless the + ``audio_channel_count`` and ``enable_separate_recognition_per_channel`` + fields are set. For best results, the audio source should be captured and transmitted using a lossless encoding (``FLAC`` or ``LINEAR16``). 
The accuracy of the speech recognition can be reduced if lossy codecs are used to capture or transmit audio, particularly if background noise is present. - Lossy codecs include ``MULAW``, ``AMR``, ``AMR_WB``, ``OGG_OPUS``, and - ``SPEEX_WITH_HEADER_BYTE``. + Lossy codecs include ``MULAW``, ``AMR``, ``AMR_WB``, ``OGG_OPUS``, + ``SPEEX_WITH_HEADER_BYTE``, and ``MP3``. The ``FLAC`` and ``WAV`` audio file formats include a header that describes the included audio content. You can request recognition for diff --git a/speech/google/cloud/speech_v1p1beta1/gapic/speech_client.py b/speech/google/cloud/speech_v1p1beta1/gapic/speech_client.py index 320abd7e8f8b..25e7dcea2281 100644 --- a/speech/google/cloud/speech_v1p1beta1/gapic/speech_client.py +++ b/speech/google/cloud/speech_v1p1beta1/gapic/speech_client.py @@ -212,12 +212,12 @@ def recognize( >>> response = client.recognize(config, audio) Args: - config (Union[dict, ~google.cloud.speech_v1p1beta1.types.RecognitionConfig]): *Required* Provides information to the recognizer that specifies how to + config (Union[dict, ~google.cloud.speech_v1p1beta1.types.RecognitionConfig]): Required. Provides information to the recognizer that specifies how to process the request. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.speech_v1p1beta1.types.RecognitionConfig` - audio (Union[dict, ~google.cloud.speech_v1p1beta1.types.RecognitionAudio]): *Required* The audio data to be recognized. + audio (Union[dict, ~google.cloud.speech_v1p1beta1.types.RecognitionAudio]): Required. The audio data to be recognized. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.speech_v1p1beta1.types.RecognitionAudio` @@ -268,7 +268,9 @@ def long_running_recognize( Performs asynchronous speech recognition: receive results via the google.longrunning.Operations interface. 
Returns either an ``Operation.error`` or an ``Operation.response`` which contains a - ``LongRunningRecognizeResponse`` message. + ``LongRunningRecognizeResponse`` message. For more information on + asynchronous speech recognition, see the + `how-to `__. Example: >>> from google.cloud import speech_v1p1beta1 @@ -295,12 +297,12 @@ def long_running_recognize( >>> metadata = response.metadata() Args: - config (Union[dict, ~google.cloud.speech_v1p1beta1.types.RecognitionConfig]): *Required* Provides information to the recognizer that specifies how to + config (Union[dict, ~google.cloud.speech_v1p1beta1.types.RecognitionConfig]): Required. Provides information to the recognizer that specifies how to process the request. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.speech_v1p1beta1.types.RecognitionConfig` - audio (Union[dict, ~google.cloud.speech_v1p1beta1.types.RecognitionAudio]): *Required* The audio data to be recognized. + audio (Union[dict, ~google.cloud.speech_v1p1beta1.types.RecognitionAudio]): Required. The audio data to be recognized. 
If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.speech_v1p1beta1.types.RecognitionAudio` diff --git a/speech/google/cloud/speech_v1p1beta1/gapic/speech_client_config.py b/speech/google/cloud/speech_v1p1beta1/gapic/speech_client_config.py index 8360e0273019..762b5e7b9856 100644 --- a/speech/google/cloud/speech_v1p1beta1/gapic/speech_client_config.py +++ b/speech/google/cloud/speech_v1p1beta1/gapic/speech_client_config.py @@ -10,26 +10,26 @@ "initial_retry_delay_millis": 100, "retry_delay_multiplier": 1.3, "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 1000000, + "initial_rpc_timeout_millis": 20000, "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 1000000, - "total_timeout_millis": 5000000, + "max_rpc_timeout_millis": 20000, + "total_timeout_millis": 600000, } }, "methods": { "Recognize": { - "timeout_millis": 200000, - "retry_codes_name": "idempotent", + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, "LongRunningRecognize": { - "timeout_millis": 200000, + "timeout_millis": 60000, "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, "StreamingRecognize": { - "timeout_millis": 905000, - "retry_codes_name": "idempotent", + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, }, diff --git a/speech/google/cloud/speech_v1p1beta1/gapic/transports/speech_grpc_transport.py b/speech/google/cloud/speech_v1p1beta1/gapic/transports/speech_grpc_transport.py index 1c373fccd56b..fb46114550d1 100644 --- a/speech/google/cloud/speech_v1p1beta1/gapic/transports/speech_grpc_transport.py +++ b/speech/google/cloud/speech_v1p1beta1/gapic/transports/speech_grpc_transport.py @@ -134,7 +134,9 @@ def long_running_recognize(self): Performs asynchronous speech recognition: receive results via the google.longrunning.Operations interface. 
Returns either an ``Operation.error`` or an ``Operation.response`` which contains a - ``LongRunningRecognizeResponse`` message. + ``LongRunningRecognizeResponse`` message. For more information on + asynchronous speech recognition, see the + `how-to `__. Returns: Callable: A callable which accepts the appropriate diff --git a/speech/google/cloud/speech_v1p1beta1/proto/cloud_speech.proto b/speech/google/cloud/speech_v1p1beta1/proto/cloud_speech.proto index 4ca4479a8ca8..7718b0eb3cf2 100644 --- a/speech/google/cloud/speech_v1p1beta1/proto/cloud_speech.proto +++ b/speech/google/cloud/speech_v1p1beta1/proto/cloud_speech.proto @@ -1,4 +1,4 @@ -// Copyright 2018 Google LLC. +// Copyright 2019 Google LLC. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -18,10 +18,11 @@ syntax = "proto3"; package google.cloud.speech.v1p1beta1; import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; import "google/longrunning/operations.proto"; import "google/protobuf/any.proto"; import "google/protobuf/duration.proto"; -import "google/protobuf/empty.proto"; import "google/protobuf/timestamp.proto"; import "google/rpc/status.proto"; @@ -30,9 +31,13 @@ option go_package = "google.golang.org/genproto/googleapis/cloud/speech/v1p1beta option java_multiple_files = true; option java_outer_classname = "SpeechProto"; option java_package = "com.google.cloud.speech.v1p1beta1"; +option objc_class_prefix = "GCS"; // Service that implements Google Cloud Speech API. service Speech { + option (google.api.default_host) = "speech.googleapis.com"; + option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; + // Performs synchronous speech recognition: receive results after all audio // has been sent and processed. 
rpc Recognize(RecognizeRequest) returns (RecognizeResponse) { @@ -40,52 +45,59 @@ service Speech { post: "/v1p1beta1/speech:recognize" body: "*" }; + option (google.api.method_signature) = "config,audio"; } // Performs asynchronous speech recognition: receive results via the // google.longrunning.Operations interface. Returns either an // `Operation.error` or an `Operation.response` which contains // a `LongRunningRecognizeResponse` message. - rpc LongRunningRecognize(LongRunningRecognizeRequest) - returns (google.longrunning.Operation) { + // For more information on asynchronous speech recognition, see the + // [how-to](https://cloud.google.com/speech-to-text/docs/async-recognize). + rpc LongRunningRecognize(LongRunningRecognizeRequest) returns (google.longrunning.Operation) { option (google.api.http) = { post: "/v1p1beta1/speech:longrunningrecognize" body: "*" }; + option (google.api.method_signature) = "config,audio"; + option (google.longrunning.operation_info) = { + response_type: "LongRunningRecognizeResponse" + metadata_type: "LongRunningRecognizeMetadata" + }; } // Performs bidirectional streaming speech recognition: receive results while // sending audio. This method is only available via the gRPC API (not REST). - rpc StreamingRecognize(stream StreamingRecognizeRequest) - returns (stream StreamingRecognizeResponse) {} + rpc StreamingRecognize(stream StreamingRecognizeRequest) returns (stream StreamingRecognizeResponse) { + } } // The top-level message sent by the client for the `Recognize` method. message RecognizeRequest { - // *Required* Provides information to the recognizer that specifies how to + // Required. Provides information to the recognizer that specifies how to // process the request. - RecognitionConfig config = 1; + RecognitionConfig config = 1 [(google.api.field_behavior) = REQUIRED]; - // *Required* The audio data to be recognized. - RecognitionAudio audio = 2; + // Required. The audio data to be recognized. 
+ RecognitionAudio audio = 2 [(google.api.field_behavior) = REQUIRED]; } // The top-level message sent by the client for the `LongRunningRecognize` // method. message LongRunningRecognizeRequest { - // *Required* Provides information to the recognizer that specifies how to + // Required. Provides information to the recognizer that specifies how to // process the request. - RecognitionConfig config = 1; + RecognitionConfig config = 1 [(google.api.field_behavior) = REQUIRED]; - // *Required* The audio data to be recognized. - RecognitionAudio audio = 2; + // Required. The audio data to be recognized. + RecognitionAudio audio = 2 [(google.api.field_behavior) = REQUIRED]; } // The top-level message sent by the client for the `StreamingRecognize` method. // Multiple `StreamingRecognizeRequest` messages are sent. The first message -// must contain a `streaming_config` message and must not contain `audio` data. -// All subsequent messages must contain `audio` data and must not contain a -// `streaming_config` message. +// must contain a `streaming_config` message and must not contain +// `audio_content`. All subsequent messages must contain `audio_content` and +// must not contain a `streaming_config` message. message StreamingRecognizeRequest { // The streaming request, which is either a streaming config or audio content. oneof streaming_request { @@ -99,9 +111,9 @@ message StreamingRecognizeRequest { // `StreamingRecognizeRequest` message must not contain `audio_content` data // and all subsequent `StreamingRecognizeRequest` messages must contain // `audio_content` data. The audio bytes must be encoded as specified in - // `RecognitionConfig`. Note: as with all bytes fields, protobuffers use a + // `RecognitionConfig`. Note: as with all bytes fields, proto buffers use a // pure binary representation (not base64). See - // [content limits](/speech-to-text/quotas#content). + // [content limits](https://cloud.google.com/speech-to-text/quotas#content). 
bytes audio_content = 2; } } @@ -109,11 +121,11 @@ message StreamingRecognizeRequest { // Provides information to the recognizer that specifies how to process the // request. message StreamingRecognitionConfig { - // *Required* Provides information to the recognizer that specifies how to + // Required. Provides information to the recognizer that specifies how to // process the request. - RecognitionConfig config = 1; + RecognitionConfig config = 1 [(google.api.field_behavior) = REQUIRED]; - // *Optional* If `false` or omitted, the recognizer will perform continuous + // If `false` or omitted, the recognizer will perform continuous // recognition (continuing to wait for and process audio even if the user // pauses speaking) until the client closes the input stream (gRPC API) or // until the maximum time limit has been reached. May return multiple @@ -126,7 +138,7 @@ message StreamingRecognitionConfig { // `true`. bool single_utterance = 2; - // *Optional* If `true`, interim results (tentative hypotheses) may be + // If `true`, interim results (tentative hypotheses) may be // returned as they become available (these interim results are indicated with // the `is_final=false` flag). // If `false` or omitted, only `is_final=true` result(s) are returned. @@ -138,13 +150,15 @@ message StreamingRecognitionConfig { message RecognitionConfig { // The encoding of the audio data sent in the request. // - // All encodings support only 1 channel (mono) audio. + // All encodings support only 1 channel (mono) audio, unless the + // `audio_channel_count` and `enable_separate_recognition_per_channel` fields + // are set. // // For best results, the audio source should be captured and transmitted using // a lossless encoding (`FLAC` or `LINEAR16`). The accuracy of the speech // recognition can be reduced if lossy codecs are used to capture or transmit // audio, particularly if background noise is present. 
Lossy codecs include - // `MULAW`, `AMR`, `AMR_WB`, `OGG_OPUS`, and `SPEEX_WITH_HEADER_BYTE`. + // `MULAW`, `AMR`, `AMR_WB`, `OGG_OPUS`, `SPEEX_WITH_HEADER_BYTE`, and `MP3`. // // The `FLAC` and `WAV` audio file formats include a header that describes the // included audio content. You can request recognition for `WAV` files that @@ -155,8 +169,7 @@ message RecognitionConfig { // an `AudioEncoding` when you send send `FLAC` or `WAV` audio, the // encoding configuration must match the encoding described in the audio // header; otherwise the request returns an - // [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT] error - // code. + // [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT] error code. enum AudioEncoding { // Not specified. ENCODING_UNSPECIFIED = 0; @@ -209,8 +222,7 @@ message RecognitionConfig { // Encoding of audio data sent in all `RecognitionAudio` messages. // This field is optional for `FLAC` and `WAV` audio files and required - // for all other audio formats. For details, see - // [AudioEncoding][google.cloud.speech.v1p1beta1.RecognitionConfig.AudioEncoding]. + // for all other audio formats. For details, see [AudioEncoding][google.cloud.speech.v1p1beta1.RecognitionConfig.AudioEncoding]. AudioEncoding encoding = 1; // Sample rate in Hertz of the audio data sent in all @@ -218,12 +230,11 @@ message RecognitionConfig { // 16000 is optimal. For best results, set the sampling rate of the audio // source to 16000 Hz. If that's not possible, use the native sample rate of // the audio source (instead of re-sampling). - // This field is optional for `FLAC` and `WAV` audio files and required - // for all other audio formats. For details, see - // [AudioEncoding][google.cloud.speech.v1p1beta1.RecognitionConfig.AudioEncoding]. + // This field is optional for FLAC and WAV audio files, but is + // required for all other audio formats. 
For details, see [AudioEncoding][google.cloud.speech.v1p1beta1.RecognitionConfig.AudioEncoding]. int32 sample_rate_hertz = 2; - // *Optional* The number of channels in the input audio data. + // The number of channels in the input audio data. // ONLY set this for MULTI-CHANNEL recognition. // Valid values for LINEAR16 and FLAC are `1`-`8`. // Valid values for OGG_OPUS are '1'-'254'. @@ -234,7 +245,7 @@ message RecognitionConfig { // `enable_separate_recognition_per_channel` to 'true'. int32 audio_channel_count = 7; - // This needs to be set to ‘true’ explicitly and `audio_channel_count` > 1 + // This needs to be set to `true` explicitly and `audio_channel_count` > 1 // to get each channel recognized separately. The recognition result will // contain a `channel_tag` field to state which channel that result belongs // to. If this is not true, we will only recognize the first channel. The @@ -242,28 +253,29 @@ message RecognitionConfig { // `audio_channel_count` multiplied by the length of the audio. bool enable_separate_recognition_per_channel = 12; - // *Required* The language of the supplied audio as a + // Required. The language of the supplied audio as a // [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tag. // Example: "en-US". - // See [Language Support](/speech-to-text/docs/languages) - // for a list of the currently supported language codes. - string language_code = 3; + // See [Language + // Support](https://cloud.google.com/speech-to-text/docs/languages) for a list + // of the currently supported language codes. + string language_code = 3 [(google.api.field_behavior) = REQUIRED]; - // *Optional* A list of up to 3 additional + // A list of up to 3 additional // [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tags, // listing possible alternative languages of the supplied audio. - // See [Language Support](/speech-to-text/docs/languages) - // for a list of the currently supported language codes. 
- // If alternative languages are listed, recognition result will contain - // recognition in the most likely language detected including the main - // language_code. The recognition result will include the language tag - // of the language detected in the audio. - // Note: This feature is only supported for Voice Command and Voice Search - // use cases and performance may vary for other use cases (e.g., phone call + // See [Language + // Support](https://cloud.google.com/speech-to-text/docs/languages) for a list + // of the currently supported language codes. If alternative languages are + // listed, recognition result will contain recognition in the most likely + // language detected including the main language_code. The recognition result + // will include the language tag of the language detected in the audio. Note: + // This feature is only supported for Voice Command and Voice Search use cases + // and performance may vary for other use cases (e.g., phone call // transcription). repeated string alternative_language_codes = 18; - // *Optional* Maximum number of recognition hypotheses to be returned. + // Maximum number of recognition hypotheses to be returned. // Specifically, the maximum number of `SpeechRecognitionAlternative` messages // within each `SpeechRecognitionResult`. // The server may return fewer than `max_alternatives`. @@ -271,30 +283,31 @@ message RecognitionConfig { // one. If omitted, will return a maximum of one. int32 max_alternatives = 4; - // *Optional* If set to `true`, the server will attempt to filter out + // If set to `true`, the server will attempt to filter out // profanities, replacing all but the initial character in each filtered word // with asterisks, e.g. "f***". If set to `false` or omitted, profanities // won't be filtered out. bool profanity_filter = 5; - // *Optional* array of - // [SpeechContext][google.cloud.speech.v1p1beta1.SpeechContext]. A means to - // provide context to assist the speech recognition. 
For more information, see - // [Phrase Hints](/speech-to-text/docs/basics#phrase-hints). + // Array of [SpeechContext][google.cloud.speech.v1p1beta1.SpeechContext]. + // A means to provide context to assist the speech recognition. For more + // information, see + // [speech + // adaptation](https://cloud.google.com/speech-to-text/docs/context-strength). repeated SpeechContext speech_contexts = 6; - // *Optional* If `true`, the top result includes a list of words and + // If `true`, the top result includes a list of words and // the start and end time offsets (timestamps) for those words. If // `false`, no word-level time offset information is returned. The default is // `false`. bool enable_word_time_offsets = 8; - // *Optional* If `true`, the top result includes a list of words and the + // If `true`, the top result includes a list of words and the // confidence for those words. If `false`, no word-level confidence // information is returned. The default is `false`. bool enable_word_confidence = 15; - // *Optional* If 'true', adds punctuation to recognition result hypotheses. + // If 'true', adds punctuation to recognition result hypotheses. // This feature is only available in select languages. Setting this for // requests in other languages has no effect at all. // The default 'false' value does not add punctuation to result hypotheses. @@ -303,19 +316,18 @@ message RecognitionConfig { // premium feature. bool enable_automatic_punctuation = 11; - // *Optional* If 'true', enables speaker detection for each recognized word in + // If 'true', enables speaker detection for each recognized word in // the top alternative of the recognition result using a speaker_tag provided // in the WordInfo. // Note: Use diarization_config instead. bool enable_speaker_diarization = 16 [deprecated = true]; - // *Optional* // If set, specifies the estimated number of speakers in the conversation. // Defaults to '2'. Ignored unless enable_speaker_diarization is set to true. 
// Note: Use diarization_config instead. int32 diarization_speaker_count = 17 [deprecated = true]; - // *Optional* Config to enable speaker diarization and set additional + // Config to enable speaker diarization and set additional // parameters to make diarization better suited for your application. // Note: When this is enabled, we send all the words from the beginning of the // audio for the top alternative in every consecutive STREAMING responses. @@ -325,10 +337,10 @@ message RecognitionConfig { // in the top alternative of the FINAL SpeechRecognitionResult. SpeakerDiarizationConfig diarization_config = 19; - // *Optional* Metadata regarding this request. + // Metadata regarding this request. RecognitionMetadata metadata = 9; - // *Optional* Which model to select for the given request. Select the model + // Which model to select for the given request. Select the model // best suited to your domain to get best results. If a model is not // explicitly specified, then we auto-select a model based on the parameters // in the RecognitionConfig. @@ -362,7 +374,7 @@ message RecognitionConfig { // string model = 13; - // *Optional* Set to true to use an enhanced model for speech recognition. + // Set to true to use an enhanced model for speech recognition. // If `use_enhanced` is set to true and the `model` field is not set, then // an appropriate enhanced model is chosen if an enhanced model exists for // the audio. @@ -373,23 +385,18 @@ message RecognitionConfig { bool use_enhanced = 14; } -// *Optional* Config to enable speaker diarization. +// Config to enable speaker diarization. message SpeakerDiarizationConfig { - // *Optional* If 'true', enables speaker detection for each recognized word in + // If 'true', enables speaker detection for each recognized word in // the top alternative of the recognition result using a speaker_tag provided // in the WordInfo. 
bool enable_speaker_diarization = 1; - // Note: Set min_speaker_count = max_speaker_count to fix the number of - // speakers to be detected in the audio. - - // *Optional* // Minimum number of speakers in the conversation. This range gives you more // flexibility by allowing the system to automatically determine the correct // number of speakers. If not set, the default value is 2. int32 min_speaker_count = 2; - // *Optional* // Maximum number of speakers in the conversation. This range gives you more // flexibility by allowing the system to automatically determine the correct // number of speakers. If not set, the default value is 6. @@ -520,7 +527,7 @@ message RecognitionMetadata { // Obfuscated (privacy-protected) ID of the user, to identify number of // unique users using the service. - int64 obfuscated_id = 9; + int64 obfuscated_id = 9 [deprecated = true]; // Description of the content. Eg. "Recordings of federal supreme court // hearings from 2012". @@ -530,12 +537,12 @@ message RecognitionMetadata { // Provides "hints" to the speech recognizer to favor specific words and phrases // in the results. message SpeechContext { - // *Optional* A list of strings containing words and phrases "hints" so that + // A list of strings containing words and phrases "hints" so that // the speech recognition is more likely to recognize them. This can be used // to improve the accuracy for specific words and phrases, for example, if // specific commands are typically spoken by the user. This can also be used // to add additional words to the vocabulary of the recognizer. See - // [usage limits](/speech-to-text/quotas#content). + // [usage limits](https://cloud.google.com/speech-to-text/quotas#content). // // List items can also be set to classes for groups of words that represent // common concepts that occur in natural language. For example, rather than @@ -557,14 +564,14 @@ message SpeechContext { // Contains audio data in the encoding specified in the `RecognitionConfig`. 
// Either `content` or `uri` must be supplied. Supplying both or neither -// returns [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]. -// See [content limits](/speech-to-text/quotas#content). +// returns [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]. See +// [content limits](https://cloud.google.com/speech-to-text/quotas#content). message RecognitionAudio { // The audio source, which is either inline content or a Google Cloud // Storage uri. oneof audio_source { // The audio data bytes encoded as specified in - // `RecognitionConfig`. Note: as with all bytes fields, protobuffers use a + // `RecognitionConfig`. Note: as with all bytes fields, proto buffers use a // pure binary representation, whereas JSON representations use base64. bytes content = 1; @@ -573,9 +580,8 @@ message RecognitionAudio { // Currently, only Google Cloud Storage URIs are // supported, which must be specified in the following format: // `gs://bucket_name/object_name` (other URI formats return - // [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]). - // For more information, see [Request - // URIs](https://cloud.google.com/storage/docs/reference-uris). + // [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]). For more information, see + // [Request URIs](https://cloud.google.com/storage/docs/reference-uris). string uri = 2; } } @@ -584,7 +590,7 @@ message RecognitionAudio { // contains the result as zero or more sequential `SpeechRecognitionResult` // messages. message RecognizeResponse { - // Output only. Sequential list of transcription results corresponding to + // Sequential list of transcription results corresponding to // sequential portions of audio. repeated SpeechRecognitionResult results = 2; } @@ -595,7 +601,7 @@ message RecognizeResponse { // returned by the `GetOperation` call of the `google::longrunning::Operations` // service. message LongRunningRecognizeResponse { - // Output only. 
Sequential list of transcription results corresponding to + // Sequential list of transcription results corresponding to // sequential portions of audio. repeated SpeechRecognitionResult results = 2; } @@ -680,44 +686,44 @@ message StreamingRecognizeResponse { END_OF_SINGLE_UTTERANCE = 1; } - // Output only. If set, returns a [google.rpc.Status][google.rpc.Status] - // message that specifies the error for the operation. + // If set, returns a [google.rpc.Status][google.rpc.Status] message that + // specifies the error for the operation. google.rpc.Status error = 1; - // Output only. This repeated list contains zero or more results that + // This repeated list contains zero or more results that // correspond to consecutive portions of the audio currently being processed. // It contains zero or one `is_final=true` result (the newly settled portion), // followed by zero or more `is_final=false` results (the interim results). repeated StreamingRecognitionResult results = 2; - // Output only. Indicates the type of speech event. + // Indicates the type of speech event. SpeechEventType speech_event_type = 4; } // A streaming speech recognition result corresponding to a portion of the audio // that is currently being processed. message StreamingRecognitionResult { - // Output only. May contain one or more recognition hypotheses (up to the + // May contain one or more recognition hypotheses (up to the // maximum specified in `max_alternatives`). // These alternatives are ordered in terms of accuracy, with the top (first) // alternative being the most probable, as ranked by the recognizer. repeated SpeechRecognitionAlternative alternatives = 1; - // Output only. If `false`, this `StreamingRecognitionResult` represents an + // If `false`, this `StreamingRecognitionResult` represents an // interim result that may change. 
If `true`, this is the final time the // speech service will return this particular `StreamingRecognitionResult`, // the recognizer will not return any further hypotheses for this portion of // the transcript and corresponding audio. bool is_final = 2; - // Output only. An estimate of the likelihood that the recognizer will not + // An estimate of the likelihood that the recognizer will not // change its guess about this interim result. Values range from 0.0 // (completely unstable) to 1.0 (completely stable). // This field is only provided for interim results (`is_final=false`). // The default of 0.0 is a sentinel value indicating `stability` was not set. float stability = 3; - // Output only. Time offset of the end of this result relative to the + // Time offset of the end of this result relative to the // beginning of the audio. google.protobuf.Duration result_end_time = 4; @@ -726,16 +732,15 @@ message StreamingRecognitionResult { // For audio_channel_count = N, its output values can range from '1' to 'N'. int32 channel_tag = 5; - // Output only. The - // [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tag of the - // language in this result. This language code was detected to have the most - // likelihood of being spoken in the audio. + // The [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tag + // of the language in this result. This language code was detected to have + // the most likelihood of being spoken in the audio. string language_code = 6; } // A speech recognition result corresponding to a portion of the audio. message SpeechRecognitionResult { - // Output only. May contain one or more recognition hypotheses (up to the + // May contain one or more recognition hypotheses (up to the // maximum specified in `max_alternatives`). // These alternatives are ordered in terms of accuracy, with the top (first) // alternative being the most probable, as ranked by the recognizer. 
@@ -746,19 +751,18 @@ message SpeechRecognitionResult { // For audio_channel_count = N, its output values can range from '1' to 'N'. int32 channel_tag = 2; - // Output only. The - // [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tag of the - // language in this result. This language code was detected to have the most - // likelihood of being spoken in the audio. + // The [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tag + // of the language in this result. This language code was detected to have + // the most likelihood of being spoken in the audio. string language_code = 5; } // Alternative hypotheses (a.k.a. n-best list). message SpeechRecognitionAlternative { - // Output only. Transcript text representing the words that the user spoke. + // Transcript text representing the words that the user spoke. string transcript = 1; - // Output only. The confidence estimate between 0.0 and 1.0. A higher number + // The confidence estimate between 0.0 and 1.0. A higher number // indicates an estimated greater likelihood that the recognized words are // correct. This field is set only for the top alternative of a non-streaming // result or, of a streaming result where `is_final=true`. @@ -767,7 +771,7 @@ message SpeechRecognitionAlternative { // The default of 0.0 is a sentinel value indicating `confidence` was not set. float confidence = 2; - // Output only. A list of word-specific information for each recognized word. + // A list of word-specific information for each recognized word. // Note: When `enable_speaker_diarization` is true, you will see all the words // from the beginning of the audio. repeated WordInfo words = 3; @@ -775,7 +779,7 @@ message SpeechRecognitionAlternative { // Word-specific information for recognized words. message WordInfo { - // Output only. Time offset relative to the beginning of the audio, + // Time offset relative to the beginning of the audio, // and corresponding to the start of the spoken word. 
// This field is only set if `enable_word_time_offsets=true` and only // in the top hypothesis. @@ -783,7 +787,7 @@ message WordInfo { // vary. google.protobuf.Duration start_time = 1; - // Output only. Time offset relative to the beginning of the audio, + // Time offset relative to the beginning of the audio, // and corresponding to the end of the spoken word. // This field is only set if `enable_word_time_offsets=true` and only // in the top hypothesis. @@ -791,10 +795,10 @@ message WordInfo { // vary. google.protobuf.Duration end_time = 2; - // Output only. The word corresponding to this set of information. + // The word corresponding to this set of information. string word = 3; - // Output only. The confidence estimate between 0.0 and 1.0. A higher number + // The confidence estimate between 0.0 and 1.0. A higher number // indicates an estimated greater likelihood that the recognized words are // correct. This field is set only for the top alternative of a non-streaming // result or, of a streaming result where `is_final=true`. @@ -803,7 +807,7 @@ message WordInfo { // The default of 0.0 is a sentinel value indicating `confidence` was not set. float confidence = 4; - // Output only. A distinct integer value is assigned for every speaker within + // A distinct integer value is assigned for every speaker within // the audio. This field specifies which one of those speakers was detected to // have spoken this word. Value ranges from '1' to diarization_speaker_count. 
// speaker_tag is set if enable_speaker_diarization = 'true' and only in the diff --git a/speech/google/cloud/speech_v1p1beta1/proto/cloud_speech_pb2.py b/speech/google/cloud/speech_v1p1beta1/proto/cloud_speech_pb2.py index 9062c3b8dbb2..3ce28e9c2a46 100644 --- a/speech/google/cloud/speech_v1p1beta1/proto/cloud_speech_pb2.py +++ b/speech/google/cloud/speech_v1p1beta1/proto/cloud_speech_pb2.py @@ -16,12 +16,13 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.longrunning import ( operations_pb2 as google_dot_longrunning_dot_operations__pb2, ) from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2 @@ -31,17 +32,18 @@ package="google.cloud.speech.v1p1beta1", syntax="proto3", serialized_options=_b( - "\n!com.google.cloud.speech.v1p1beta1B\013SpeechProtoP\001ZCgoogle.golang.org/genproto/googleapis/cloud/speech/v1p1beta1;speech\370\001\001" + "\n!com.google.cloud.speech.v1p1beta1B\013SpeechProtoP\001ZCgoogle.golang.org/genproto/googleapis/cloud/speech/v1p1beta1;speech\370\001\001\242\002\003GCS" ), serialized_pb=_b( - '\n6google/cloud/speech_v1p1beta1/proto/cloud_speech.proto\x12\x1dgoogle.cloud.speech.v1p1beta1\x1a\x1cgoogle/api/annotations.proto\x1a#google/longrunning/operations.proto\x1a\x19google/protobuf/any.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto"\x94\x01\n\x10RecognizeRequest\x12@\n\x06\x63onfig\x18\x01 
\x01(\x0b\x32\x30.google.cloud.speech.v1p1beta1.RecognitionConfig\x12>\n\x05\x61udio\x18\x02 \x01(\x0b\x32/.google.cloud.speech.v1p1beta1.RecognitionAudio"\x9f\x01\n\x1bLongRunningRecognizeRequest\x12@\n\x06\x63onfig\x18\x01 \x01(\x0b\x32\x30.google.cloud.speech.v1p1beta1.RecognitionConfig\x12>\n\x05\x61udio\x18\x02 \x01(\x0b\x32/.google.cloud.speech.v1p1beta1.RecognitionAudio"\xa0\x01\n\x19StreamingRecognizeRequest\x12U\n\x10streaming_config\x18\x01 \x01(\x0b\x32\x39.google.cloud.speech.v1p1beta1.StreamingRecognitionConfigH\x00\x12\x17\n\raudio_content\x18\x02 \x01(\x0cH\x00\x42\x13\n\x11streaming_request"\x91\x01\n\x1aStreamingRecognitionConfig\x12@\n\x06\x63onfig\x18\x01 \x01(\x0b\x32\x30.google.cloud.speech.v1p1beta1.RecognitionConfig\x12\x18\n\x10single_utterance\x18\x02 \x01(\x08\x12\x17\n\x0finterim_results\x18\x03 \x01(\x08"\x92\x07\n\x11RecognitionConfig\x12P\n\x08\x65ncoding\x18\x01 \x01(\x0e\x32>.google.cloud.speech.v1p1beta1.RecognitionConfig.AudioEncoding\x12\x19\n\x11sample_rate_hertz\x18\x02 \x01(\x05\x12\x1b\n\x13\x61udio_channel_count\x18\x07 \x01(\x05\x12/\n\'enable_separate_recognition_per_channel\x18\x0c \x01(\x08\x12\x15\n\rlanguage_code\x18\x03 \x01(\t\x12"\n\x1a\x61lternative_language_codes\x18\x12 \x03(\t\x12\x18\n\x10max_alternatives\x18\x04 \x01(\x05\x12\x18\n\x10profanity_filter\x18\x05 \x01(\x08\x12\x45\n\x0fspeech_contexts\x18\x06 \x03(\x0b\x32,.google.cloud.speech.v1p1beta1.SpeechContext\x12 \n\x18\x65nable_word_time_offsets\x18\x08 \x01(\x08\x12\x1e\n\x16\x65nable_word_confidence\x18\x0f \x01(\x08\x12$\n\x1c\x65nable_automatic_punctuation\x18\x0b \x01(\x08\x12&\n\x1a\x65nable_speaker_diarization\x18\x10 \x01(\x08\x42\x02\x18\x01\x12%\n\x19\x64iarization_speaker_count\x18\x11 \x01(\x05\x42\x02\x18\x01\x12S\n\x12\x64iarization_config\x18\x13 \x01(\x0b\x32\x37.google.cloud.speech.v1p1beta1.SpeakerDiarizationConfig\x12\x44\n\x08metadata\x18\t \x01(\x0b\x32\x32.google.cloud.speech.v1p1beta1.RecognitionMetadata\x12\r\n\x05model\x18\r 
\x01(\t\x12\x14\n\x0cuse_enhanced\x18\x0e \x01(\x08"\x94\x01\n\rAudioEncoding\x12\x18\n\x14\x45NCODING_UNSPECIFIED\x10\x00\x12\x0c\n\x08LINEAR16\x10\x01\x12\x08\n\x04\x46LAC\x10\x02\x12\t\n\x05MULAW\x10\x03\x12\x07\n\x03\x41MR\x10\x04\x12\n\n\x06\x41MR_WB\x10\x05\x12\x0c\n\x08OGG_OPUS\x10\x06\x12\x1a\n\x16SPEEX_WITH_HEADER_BYTE\x10\x07\x12\x07\n\x03MP3\x10\x08"t\n\x18SpeakerDiarizationConfig\x12"\n\x1a\x65nable_speaker_diarization\x18\x01 \x01(\x08\x12\x19\n\x11min_speaker_count\x18\x02 \x01(\x05\x12\x19\n\x11max_speaker_count\x18\x03 \x01(\x05"\xd3\x08\n\x13RecognitionMetadata\x12\\\n\x10interaction_type\x18\x01 \x01(\x0e\x32\x42.google.cloud.speech.v1p1beta1.RecognitionMetadata.InteractionType\x12$\n\x1cindustry_naics_code_of_audio\x18\x03 \x01(\r\x12\x62\n\x13microphone_distance\x18\x04 \x01(\x0e\x32\x45.google.cloud.speech.v1p1beta1.RecognitionMetadata.MicrophoneDistance\x12\x61\n\x13original_media_type\x18\x05 \x01(\x0e\x32\x44.google.cloud.speech.v1p1beta1.RecognitionMetadata.OriginalMediaType\x12\x65\n\x15recording_device_type\x18\x06 \x01(\x0e\x32\x46.google.cloud.speech.v1p1beta1.RecognitionMetadata.RecordingDeviceType\x12\x1d\n\x15recording_device_name\x18\x07 \x01(\t\x12\x1a\n\x12original_mime_type\x18\x08 \x01(\t\x12\x15\n\robfuscated_id\x18\t \x01(\x03\x12\x13\n\x0b\x61udio_topic\x18\n \x01(\t"\xc5\x01\n\x0fInteractionType\x12 
\n\x1cINTERACTION_TYPE_UNSPECIFIED\x10\x00\x12\x0e\n\nDISCUSSION\x10\x01\x12\x10\n\x0cPRESENTATION\x10\x02\x12\x0e\n\nPHONE_CALL\x10\x03\x12\r\n\tVOICEMAIL\x10\x04\x12\x1b\n\x17PROFESSIONALLY_PRODUCED\x10\x05\x12\x10\n\x0cVOICE_SEARCH\x10\x06\x12\x11\n\rVOICE_COMMAND\x10\x07\x12\r\n\tDICTATION\x10\x08"d\n\x12MicrophoneDistance\x12#\n\x1fMICROPHONE_DISTANCE_UNSPECIFIED\x10\x00\x12\r\n\tNEARFIELD\x10\x01\x12\x0c\n\x08MIDFIELD\x10\x02\x12\x0c\n\x08\x46\x41RFIELD\x10\x03"N\n\x11OriginalMediaType\x12#\n\x1fORIGINAL_MEDIA_TYPE_UNSPECIFIED\x10\x00\x12\t\n\x05\x41UDIO\x10\x01\x12\t\n\x05VIDEO\x10\x02"\xa4\x01\n\x13RecordingDeviceType\x12%\n!RECORDING_DEVICE_TYPE_UNSPECIFIED\x10\x00\x12\x0e\n\nSMARTPHONE\x10\x01\x12\x06\n\x02PC\x10\x02\x12\x0e\n\nPHONE_LINE\x10\x03\x12\x0b\n\x07VEHICLE\x10\x04\x12\x18\n\x14OTHER_OUTDOOR_DEVICE\x10\x05\x12\x17\n\x13OTHER_INDOOR_DEVICE\x10\x06"/\n\rSpeechContext\x12\x0f\n\x07phrases\x18\x01 \x03(\t\x12\r\n\x05\x62oost\x18\x04 \x01(\x02"D\n\x10RecognitionAudio\x12\x11\n\x07\x63ontent\x18\x01 \x01(\x0cH\x00\x12\r\n\x03uri\x18\x02 \x01(\tH\x00\x42\x0e\n\x0c\x61udio_source"\\\n\x11RecognizeResponse\x12G\n\x07results\x18\x02 \x03(\x0b\x32\x36.google.cloud.speech.v1p1beta1.SpeechRecognitionResult"g\n\x1cLongRunningRecognizeResponse\x12G\n\x07results\x18\x02 \x03(\x0b\x32\x36.google.cloud.speech.v1p1beta1.SpeechRecognitionResult"\x9e\x01\n\x1cLongRunningRecognizeMetadata\x12\x18\n\x10progress_percent\x18\x01 \x01(\x05\x12.\n\nstart_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x34\n\x10last_update_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"\xbf\x02\n\x1aStreamingRecognizeResponse\x12!\n\x05\x65rror\x18\x01 \x01(\x0b\x32\x12.google.rpc.Status\x12J\n\x07results\x18\x02 \x03(\x0b\x32\x39.google.cloud.speech.v1p1beta1.StreamingRecognitionResult\x12\x64\n\x11speech_event_type\x18\x04 
\x01(\x0e\x32I.google.cloud.speech.v1p1beta1.StreamingRecognizeResponse.SpeechEventType"L\n\x0fSpeechEventType\x12\x1c\n\x18SPEECH_EVENT_UNSPECIFIED\x10\x00\x12\x1b\n\x17\x45ND_OF_SINGLE_UTTERANCE\x10\x01"\xf4\x01\n\x1aStreamingRecognitionResult\x12Q\n\x0c\x61lternatives\x18\x01 \x03(\x0b\x32;.google.cloud.speech.v1p1beta1.SpeechRecognitionAlternative\x12\x10\n\x08is_final\x18\x02 \x01(\x08\x12\x11\n\tstability\x18\x03 \x01(\x02\x12\x32\n\x0fresult_end_time\x18\x04 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x13\n\x0b\x63hannel_tag\x18\x05 \x01(\x05\x12\x15\n\rlanguage_code\x18\x06 \x01(\t"\x98\x01\n\x17SpeechRecognitionResult\x12Q\n\x0c\x61lternatives\x18\x01 \x03(\x0b\x32;.google.cloud.speech.v1p1beta1.SpeechRecognitionAlternative\x12\x13\n\x0b\x63hannel_tag\x18\x02 \x01(\x05\x12\x15\n\rlanguage_code\x18\x05 \x01(\t"~\n\x1cSpeechRecognitionAlternative\x12\x12\n\ntranscript\x18\x01 \x01(\t\x12\x12\n\nconfidence\x18\x02 \x01(\x02\x12\x36\n\x05words\x18\x03 \x03(\x0b\x32\'.google.cloud.speech.v1p1beta1.WordInfo"\x9d\x01\n\x08WordInfo\x12-\n\nstart_time\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\x12+\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x0c\n\x04word\x18\x03 \x01(\t\x12\x12\n\nconfidence\x18\x04 \x01(\x02\x12\x13\n\x0bspeaker_tag\x18\x05 \x01(\x05\x32\xda\x03\n\x06Speech\x12\x96\x01\n\tRecognize\x12/.google.cloud.speech.v1p1beta1.RecognizeRequest\x1a\x30.google.cloud.speech.v1p1beta1.RecognizeResponse"&\x82\xd3\xe4\x93\x02 
"\x1b/v1p1beta1/speech:recognize:\x01*\x12\xa4\x01\n\x14LongRunningRecognize\x12:.google.cloud.speech.v1p1beta1.LongRunningRecognizeRequest\x1a\x1d.google.longrunning.Operation"1\x82\xd3\xe4\x93\x02+"&/v1p1beta1/speech:longrunningrecognize:\x01*\x12\x8f\x01\n\x12StreamingRecognize\x12\x38.google.cloud.speech.v1p1beta1.StreamingRecognizeRequest\x1a\x39.google.cloud.speech.v1p1beta1.StreamingRecognizeResponse"\x00(\x01\x30\x01\x42z\n!com.google.cloud.speech.v1p1beta1B\x0bSpeechProtoP\x01ZCgoogle.golang.org/genproto/googleapis/cloud/speech/v1p1beta1;speech\xf8\x01\x01\x62\x06proto3' + '\n6google/cloud/speech_v1p1beta1/proto/cloud_speech.proto\x12\x1dgoogle.cloud.speech.v1p1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a#google/longrunning/operations.proto\x1a\x19google/protobuf/any.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto"\x9e\x01\n\x10RecognizeRequest\x12\x45\n\x06\x63onfig\x18\x01 \x01(\x0b\x32\x30.google.cloud.speech.v1p1beta1.RecognitionConfigB\x03\xe0\x41\x02\x12\x43\n\x05\x61udio\x18\x02 \x01(\x0b\x32/.google.cloud.speech.v1p1beta1.RecognitionAudioB\x03\xe0\x41\x02"\xa9\x01\n\x1bLongRunningRecognizeRequest\x12\x45\n\x06\x63onfig\x18\x01 \x01(\x0b\x32\x30.google.cloud.speech.v1p1beta1.RecognitionConfigB\x03\xe0\x41\x02\x12\x43\n\x05\x61udio\x18\x02 \x01(\x0b\x32/.google.cloud.speech.v1p1beta1.RecognitionAudioB\x03\xe0\x41\x02"\xa0\x01\n\x19StreamingRecognizeRequest\x12U\n\x10streaming_config\x18\x01 \x01(\x0b\x32\x39.google.cloud.speech.v1p1beta1.StreamingRecognitionConfigH\x00\x12\x17\n\raudio_content\x18\x02 \x01(\x0cH\x00\x42\x13\n\x11streaming_request"\x96\x01\n\x1aStreamingRecognitionConfig\x12\x45\n\x06\x63onfig\x18\x01 \x01(\x0b\x32\x30.google.cloud.speech.v1p1beta1.RecognitionConfigB\x03\xe0\x41\x02\x12\x18\n\x10single_utterance\x18\x02 \x01(\x08\x12\x17\n\x0finterim_results\x18\x03 
\x01(\x08"\x97\x07\n\x11RecognitionConfig\x12P\n\x08\x65ncoding\x18\x01 \x01(\x0e\x32>.google.cloud.speech.v1p1beta1.RecognitionConfig.AudioEncoding\x12\x19\n\x11sample_rate_hertz\x18\x02 \x01(\x05\x12\x1b\n\x13\x61udio_channel_count\x18\x07 \x01(\x05\x12/\n\'enable_separate_recognition_per_channel\x18\x0c \x01(\x08\x12\x1a\n\rlanguage_code\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12"\n\x1a\x61lternative_language_codes\x18\x12 \x03(\t\x12\x18\n\x10max_alternatives\x18\x04 \x01(\x05\x12\x18\n\x10profanity_filter\x18\x05 \x01(\x08\x12\x45\n\x0fspeech_contexts\x18\x06 \x03(\x0b\x32,.google.cloud.speech.v1p1beta1.SpeechContext\x12 \n\x18\x65nable_word_time_offsets\x18\x08 \x01(\x08\x12\x1e\n\x16\x65nable_word_confidence\x18\x0f \x01(\x08\x12$\n\x1c\x65nable_automatic_punctuation\x18\x0b \x01(\x08\x12&\n\x1a\x65nable_speaker_diarization\x18\x10 \x01(\x08\x42\x02\x18\x01\x12%\n\x19\x64iarization_speaker_count\x18\x11 \x01(\x05\x42\x02\x18\x01\x12S\n\x12\x64iarization_config\x18\x13 \x01(\x0b\x32\x37.google.cloud.speech.v1p1beta1.SpeakerDiarizationConfig\x12\x44\n\x08metadata\x18\t \x01(\x0b\x32\x32.google.cloud.speech.v1p1beta1.RecognitionMetadata\x12\r\n\x05model\x18\r \x01(\t\x12\x14\n\x0cuse_enhanced\x18\x0e \x01(\x08"\x94\x01\n\rAudioEncoding\x12\x18\n\x14\x45NCODING_UNSPECIFIED\x10\x00\x12\x0c\n\x08LINEAR16\x10\x01\x12\x08\n\x04\x46LAC\x10\x02\x12\t\n\x05MULAW\x10\x03\x12\x07\n\x03\x41MR\x10\x04\x12\n\n\x06\x41MR_WB\x10\x05\x12\x0c\n\x08OGG_OPUS\x10\x06\x12\x1a\n\x16SPEEX_WITH_HEADER_BYTE\x10\x07\x12\x07\n\x03MP3\x10\x08"t\n\x18SpeakerDiarizationConfig\x12"\n\x1a\x65nable_speaker_diarization\x18\x01 \x01(\x08\x12\x19\n\x11min_speaker_count\x18\x02 \x01(\x05\x12\x19\n\x11max_speaker_count\x18\x03 \x01(\x05"\xd7\x08\n\x13RecognitionMetadata\x12\\\n\x10interaction_type\x18\x01 \x01(\x0e\x32\x42.google.cloud.speech.v1p1beta1.RecognitionMetadata.InteractionType\x12$\n\x1cindustry_naics_code_of_audio\x18\x03 \x01(\r\x12\x62\n\x13microphone_distance\x18\x04 
\x01(\x0e\x32\x45.google.cloud.speech.v1p1beta1.RecognitionMetadata.MicrophoneDistance\x12\x61\n\x13original_media_type\x18\x05 \x01(\x0e\x32\x44.google.cloud.speech.v1p1beta1.RecognitionMetadata.OriginalMediaType\x12\x65\n\x15recording_device_type\x18\x06 \x01(\x0e\x32\x46.google.cloud.speech.v1p1beta1.RecognitionMetadata.RecordingDeviceType\x12\x1d\n\x15recording_device_name\x18\x07 \x01(\t\x12\x1a\n\x12original_mime_type\x18\x08 \x01(\t\x12\x19\n\robfuscated_id\x18\t \x01(\x03\x42\x02\x18\x01\x12\x13\n\x0b\x61udio_topic\x18\n \x01(\t"\xc5\x01\n\x0fInteractionType\x12 \n\x1cINTERACTION_TYPE_UNSPECIFIED\x10\x00\x12\x0e\n\nDISCUSSION\x10\x01\x12\x10\n\x0cPRESENTATION\x10\x02\x12\x0e\n\nPHONE_CALL\x10\x03\x12\r\n\tVOICEMAIL\x10\x04\x12\x1b\n\x17PROFESSIONALLY_PRODUCED\x10\x05\x12\x10\n\x0cVOICE_SEARCH\x10\x06\x12\x11\n\rVOICE_COMMAND\x10\x07\x12\r\n\tDICTATION\x10\x08"d\n\x12MicrophoneDistance\x12#\n\x1fMICROPHONE_DISTANCE_UNSPECIFIED\x10\x00\x12\r\n\tNEARFIELD\x10\x01\x12\x0c\n\x08MIDFIELD\x10\x02\x12\x0c\n\x08\x46\x41RFIELD\x10\x03"N\n\x11OriginalMediaType\x12#\n\x1fORIGINAL_MEDIA_TYPE_UNSPECIFIED\x10\x00\x12\t\n\x05\x41UDIO\x10\x01\x12\t\n\x05VIDEO\x10\x02"\xa4\x01\n\x13RecordingDeviceType\x12%\n!RECORDING_DEVICE_TYPE_UNSPECIFIED\x10\x00\x12\x0e\n\nSMARTPHONE\x10\x01\x12\x06\n\x02PC\x10\x02\x12\x0e\n\nPHONE_LINE\x10\x03\x12\x0b\n\x07VEHICLE\x10\x04\x12\x18\n\x14OTHER_OUTDOOR_DEVICE\x10\x05\x12\x17\n\x13OTHER_INDOOR_DEVICE\x10\x06"/\n\rSpeechContext\x12\x0f\n\x07phrases\x18\x01 \x03(\t\x12\r\n\x05\x62oost\x18\x04 \x01(\x02"D\n\x10RecognitionAudio\x12\x11\n\x07\x63ontent\x18\x01 \x01(\x0cH\x00\x12\r\n\x03uri\x18\x02 \x01(\tH\x00\x42\x0e\n\x0c\x61udio_source"\\\n\x11RecognizeResponse\x12G\n\x07results\x18\x02 \x03(\x0b\x32\x36.google.cloud.speech.v1p1beta1.SpeechRecognitionResult"g\n\x1cLongRunningRecognizeResponse\x12G\n\x07results\x18\x02 
\x03(\x0b\x32\x36.google.cloud.speech.v1p1beta1.SpeechRecognitionResult"\x9e\x01\n\x1cLongRunningRecognizeMetadata\x12\x18\n\x10progress_percent\x18\x01 \x01(\x05\x12.\n\nstart_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x34\n\x10last_update_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"\xbf\x02\n\x1aStreamingRecognizeResponse\x12!\n\x05\x65rror\x18\x01 \x01(\x0b\x32\x12.google.rpc.Status\x12J\n\x07results\x18\x02 \x03(\x0b\x32\x39.google.cloud.speech.v1p1beta1.StreamingRecognitionResult\x12\x64\n\x11speech_event_type\x18\x04 \x01(\x0e\x32I.google.cloud.speech.v1p1beta1.StreamingRecognizeResponse.SpeechEventType"L\n\x0fSpeechEventType\x12\x1c\n\x18SPEECH_EVENT_UNSPECIFIED\x10\x00\x12\x1b\n\x17\x45ND_OF_SINGLE_UTTERANCE\x10\x01"\xf4\x01\n\x1aStreamingRecognitionResult\x12Q\n\x0c\x61lternatives\x18\x01 \x03(\x0b\x32;.google.cloud.speech.v1p1beta1.SpeechRecognitionAlternative\x12\x10\n\x08is_final\x18\x02 \x01(\x08\x12\x11\n\tstability\x18\x03 \x01(\x02\x12\x32\n\x0fresult_end_time\x18\x04 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x13\n\x0b\x63hannel_tag\x18\x05 \x01(\x05\x12\x15\n\rlanguage_code\x18\x06 \x01(\t"\x98\x01\n\x17SpeechRecognitionResult\x12Q\n\x0c\x61lternatives\x18\x01 \x03(\x0b\x32;.google.cloud.speech.v1p1beta1.SpeechRecognitionAlternative\x12\x13\n\x0b\x63hannel_tag\x18\x02 \x01(\x05\x12\x15\n\rlanguage_code\x18\x05 \x01(\t"~\n\x1cSpeechRecognitionAlternative\x12\x12\n\ntranscript\x18\x01 \x01(\t\x12\x12\n\nconfidence\x18\x02 \x01(\x02\x12\x36\n\x05words\x18\x03 \x03(\x0b\x32\'.google.cloud.speech.v1p1beta1.WordInfo"\x9d\x01\n\x08WordInfo\x12-\n\nstart_time\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\x12+\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x0c\n\x04word\x18\x03 \x01(\t\x12\x12\n\nconfidence\x18\x04 \x01(\x02\x12\x13\n\x0bspeaker_tag\x18\x05 
\x01(\x05\x32\x82\x05\n\x06Speech\x12\xa5\x01\n\tRecognize\x12/.google.cloud.speech.v1p1beta1.RecognizeRequest\x1a\x30.google.cloud.speech.v1p1beta1.RecognizeResponse"5\x82\xd3\xe4\x93\x02 "\x1b/v1p1beta1/speech:recognize:\x01*\xda\x41\x0c\x63onfig,audio\x12\xf2\x01\n\x14LongRunningRecognize\x12:.google.cloud.speech.v1p1beta1.LongRunningRecognizeRequest\x1a\x1d.google.longrunning.Operation"\x7f\x82\xd3\xe4\x93\x02+"&/v1p1beta1/speech:longrunningrecognize:\x01*\xda\x41\x0c\x63onfig,audio\xca\x41<\n\x1cLongRunningRecognizeResponse\x12\x1cLongRunningRecognizeMetadata\x12\x8f\x01\n\x12StreamingRecognize\x12\x38.google.cloud.speech.v1p1beta1.StreamingRecognizeRequest\x1a\x39.google.cloud.speech.v1p1beta1.StreamingRecognizeResponse"\x00(\x01\x30\x01\x1aI\xca\x41\x15speech.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB\x80\x01\n!com.google.cloud.speech.v1p1beta1B\x0bSpeechProtoP\x01ZCgoogle.golang.org/genproto/googleapis/cloud/speech/v1p1beta1;speech\xf8\x01\x01\xa2\x02\x03GCSb\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, google_dot_longrunning_dot_operations__pb2.DESCRIPTOR, google_dot_protobuf_dot_any__pb2.DESCRIPTOR, google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, - google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, google_dot_rpc_dot_status__pb2.DESCRIPTOR, ], @@ -92,8 +94,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=1693, - serialized_end=1841, + serialized_start=1752, + serialized_end=1900, ) _sym_db.RegisterEnumDescriptor(_RECOGNITIONCONFIG_AUDIOENCODING) @@ -141,8 +143,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=2523, - serialized_end=2720, + serialized_start=2586, + serialized_end=2783, ) _sym_db.RegisterEnumDescriptor(_RECOGNITIONMETADATA_INTERACTIONTYPE) @@ -171,8 +173,8 @@ ], containing_type=None, 
serialized_options=None, - serialized_start=2722, - serialized_end=2822, + serialized_start=2785, + serialized_end=2885, ) _sym_db.RegisterEnumDescriptor(_RECOGNITIONMETADATA_MICROPHONEDISTANCE) @@ -198,8 +200,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=2824, - serialized_end=2902, + serialized_start=2887, + serialized_end=2965, ) _sym_db.RegisterEnumDescriptor(_RECOGNITIONMETADATA_ORIGINALMEDIATYPE) @@ -245,8 +247,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=2905, - serialized_end=3069, + serialized_start=2968, + serialized_end=3132, ) _sym_db.RegisterEnumDescriptor(_RECOGNITIONMETADATA_RECORDINGDEVICETYPE) @@ -273,8 +275,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=3794, - serialized_end=3870, + serialized_start=3857, + serialized_end=3933, ) _sym_db.RegisterEnumDescriptor(_STREAMINGRECOGNIZERESPONSE_SPEECHEVENTTYPE) @@ -301,7 +303,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -319,7 +321,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -331,8 +333,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=303, - serialized_end=451, + serialized_start=332, + serialized_end=490, ) @@ -358,7 +360,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -376,7 +378,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -388,8 +390,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=454, - serialized_end=613, + serialized_start=493, + serialized_end=662, ) @@ -453,8 
+455,8 @@ fields=[], ) ], - serialized_start=616, - serialized_end=776, + serialized_start=665, + serialized_end=825, ) @@ -480,7 +482,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -528,8 +530,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=779, - serialized_end=924, + serialized_start=828, + serialized_end=978, ) @@ -627,7 +629,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -873,8 +875,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=927, - serialized_end=1841, + serialized_start=981, + serialized_end=1900, ) @@ -948,8 +950,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1843, - serialized_end=1959, + serialized_start=1902, + serialized_end=2018, ) @@ -1101,7 +1103,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\030\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1136,8 +1138,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1962, - serialized_end=3069, + serialized_start=2021, + serialized_end=3132, ) @@ -1193,8 +1195,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3071, - serialized_end=3118, + serialized_start=3134, + serialized_end=3181, ) @@ -1258,8 +1260,8 @@ fields=[], ) ], - serialized_start=3120, - serialized_end=3188, + serialized_start=3183, + serialized_end=3251, ) @@ -1297,8 +1299,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3190, - serialized_end=3282, + serialized_start=3253, + serialized_end=3345, ) @@ -1336,8 +1338,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3284, - serialized_end=3387, + serialized_start=3347, + 
serialized_end=3450, ) @@ -1411,8 +1413,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3390, - serialized_end=3548, + serialized_start=3453, + serialized_end=3611, ) @@ -1486,8 +1488,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3551, - serialized_end=3870, + serialized_start=3614, + serialized_end=3933, ) @@ -1615,8 +1617,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3873, - serialized_end=4117, + serialized_start=3936, + serialized_end=4180, ) @@ -1690,8 +1692,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4120, - serialized_end=4272, + serialized_start=4183, + serialized_end=4335, ) @@ -1765,8 +1767,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4274, - serialized_end=4400, + serialized_start=4337, + serialized_end=4463, ) @@ -1876,8 +1878,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4403, - serialized_end=4560, + serialized_start=4466, + serialized_end=4623, ) _RECOGNIZEREQUEST.fields_by_name["config"].message_type = _RECOGNITIONCONFIG @@ -2021,10 +2023,10 @@ Attributes: config: - *Required* Provides information to the recognizer that + Required. Provides information to the recognizer that specifies how to process the request. audio: - *Required* The audio data to be recognized. + Required. The audio data to be recognized. """, # @@protoc_insertion_point(class_scope:google.cloud.speech.v1p1beta1.RecognizeRequest) ), @@ -2043,10 +2045,10 @@ Attributes: config: - *Required* Provides information to the recognizer that + Required. Provides information to the recognizer that specifies how to process the request. audio: - *Required* The audio data to be recognized. + Required. The audio data to be recognized. 
""", # @@protoc_insertion_point(class_scope:google.cloud.speech.v1p1beta1.LongRunningRecognizeRequest) ), @@ -2062,8 +2064,8 @@ __doc__="""The top-level message sent by the client for the ``StreamingRecognize`` method. Multiple ``StreamingRecognizeRequest`` messages are sent. The first message must contain a ``streaming_config`` message and must not - contain ``audio`` data. All subsequent messages must contain ``audio`` - data and must not contain a ``streaming_config`` message. + contain ``audio_content``. All subsequent messages must contain + ``audio_content`` and must not contain a ``streaming_config`` message. Attributes: @@ -2082,9 +2084,9 @@ ``StreamingRecognizeRequest`` messages must contain ``audio_content`` data. The audio bytes must be encoded as specified in ``RecognitionConfig``. Note: as with all bytes - fields, protobuffers use a pure binary representation (not - base64). See `content limits `__. + fields, proto buffers use a pure binary representation (not + base64). See `content limits `__. """, # @@protoc_insertion_point(class_scope:google.cloud.speech.v1p1beta1.StreamingRecognizeRequest) ), @@ -2103,14 +2105,14 @@ Attributes: config: - *Required* Provides information to the recognizer that + Required. Provides information to the recognizer that specifies how to process the request. single_utterance: - *Optional* If ``false`` or omitted, the recognizer will - perform continuous recognition (continuing to wait for and - process audio even if the user pauses speaking) until the - client closes the input stream (gRPC API) or until the maximum - time limit has been reached. May return multiple + If ``false`` or omitted, the recognizer will perform + continuous recognition (continuing to wait for and process + audio even if the user pauses speaking) until the client + closes the input stream (gRPC API) or until the maximum time + limit has been reached. May return multiple ``StreamingRecognitionResult``\ s with the ``is_final`` flag set to ``true``. 
If ``true``, the recognizer will detect a single spoken utterance. When it detects that the user has @@ -2119,11 +2121,10 @@ will return no more than one ``StreamingRecognitionResult`` with the ``is_final`` flag set to ``true``. interim_results: - *Optional* If ``true``, interim results (tentative hypotheses) - may be returned as they become available (these interim - results are indicated with the ``is_final=false`` flag). If - ``false`` or omitted, only ``is_final=true`` result(s) are - returned. + If ``true``, interim results (tentative hypotheses) may be + returned as they become available (these interim results are + indicated with the ``is_final=false`` flag). If ``false`` or + omitted, only ``is_final=true`` result(s) are returned. """, # @@protoc_insertion_point(class_scope:google.cloud.speech.v1p1beta1.StreamingRecognitionConfig) ), @@ -2153,22 +2154,22 @@ 16000 is optimal. For best results, set the sampling rate of the audio source to 16000 Hz. If that's not possible, use the native sample rate of the audio source (instead of re- - sampling). This field is optional for ``FLAC`` and ``WAV`` - audio files and required for all other audio formats. For + sampling). This field is optional for FLAC and WAV audio + files, but is required for all other audio formats. For details, see [AudioEncoding][google.cloud.speech.v1p1beta1.Rec ognitionConfig.AudioEncoding]. audio_channel_count: - *Optional* The number of channels in the input audio data. - ONLY set this for MULTI-CHANNEL recognition. Valid values for - LINEAR16 and FLAC are ``1``-``8``. Valid values for OGG\_OPUS - are '1'-'254'. Valid value for MULAW, AMR, AMR\_WB and + The number of channels in the input audio data. ONLY set this + for MULTI-CHANNEL recognition. Valid values for LINEAR16 and + FLAC are ``1``-``8``. Valid values for OGG\_OPUS are + '1'-'254'. Valid value for MULAW, AMR, AMR\_WB and SPEEX\_WITH\_HEADER\_BYTE is only ``1``. If ``0`` or omitted, defaults to one channel (mono). 
Note: We only recognize the first channel by default. To perform independent recognition on each channel set ``enable_separate_recognition_per_channel`` to 'true'. enable_separate_recognition_per_channel: - This needs to be set to ‘true’ explicitly and + This needs to be set to ``true`` explicitly and ``audio_channel_count`` > 1 to get each channel recognized separately. The recognition result will contain a ``channel_tag`` field to state which channel that result @@ -2177,16 +2178,16 @@ channels recognized: ``audio_channel_count`` multiplied by the length of the audio. language_code: - *Required* The language of the supplied audio as a `BCP-47 + Required. The language of the supplied audio as a `BCP-47 `__ language - tag. Example: "en-US". See `Language Support `__ for a list of the currently supported - language codes. + tag. Example: "en-US". See `Language Support + `__ + for a list of the currently supported language codes. alternative_language_codes: - *Optional* A list of up to 3 additional `BCP-47 - `__ language - tags, listing possible alternative languages of the supplied - audio. See `Language Support `__ language tags, listing + possible alternative languages of the supplied audio. See + `Language Support `__ for a list of the currently supported language codes. If alternative languages are listed, recognition result will contain recognition in the most likely @@ -2197,75 +2198,72 @@ performance may vary for other use cases (e.g., phone call transcription). max_alternatives: - *Optional* Maximum number of recognition hypotheses to be - returned. Specifically, the maximum number of + Maximum number of recognition hypotheses to be returned. + Specifically, the maximum number of ``SpeechRecognitionAlternative`` messages within each ``SpeechRecognitionResult``. The server may return fewer than ``max_alternatives``. Valid values are ``0``-``30``. A value of ``0`` or ``1`` will return a maximum of one. If omitted, will return a maximum of one. 
profanity_filter: - *Optional* If set to ``true``, the server will attempt to - filter out profanities, replacing all but the initial - character in each filtered word with asterisks, e.g. - "f\*\*\*". If set to ``false`` or omitted, profanities won't - be filtered out. + If set to ``true``, the server will attempt to filter out + profanities, replacing all but the initial character in each + filtered word with asterisks, e.g. "f\*\*\*". If set to + ``false`` or omitted, profanities won't be filtered out. speech_contexts: - *Optional* array of + Array of [SpeechContext][google.cloud.speech.v1p1beta1.SpeechContext]. A means to provide context to assist the speech recognition. - For more information, see `Phrase Hints `__. + For more information, see `speech adaptation + `__. enable_word_time_offsets: - *Optional* If ``true``, the top result includes a list of - words and the start and end time offsets (timestamps) for - those words. If ``false``, no word-level time offset - information is returned. The default is ``false``. + If ``true``, the top result includes a list of words and the + start and end time offsets (timestamps) for those words. If + ``false``, no word-level time offset information is returned. + The default is ``false``. enable_word_confidence: - *Optional* If ``true``, the top result includes a list of - words and the confidence for those words. If ``false``, no - word-level confidence information is returned. The default is - ``false``. + If ``true``, the top result includes a list of words and the + confidence for those words. If ``false``, no word-level + confidence information is returned. The default is ``false``. enable_automatic_punctuation: - *Optional* If 'true', adds punctuation to recognition result - hypotheses. This feature is only available in select - languages. Setting this for requests in other languages has no - effect at all. The default 'false' value does not add - punctuation to result hypotheses. 
Note: This is currently - offered as an experimental service, complimentary to all - users. In the future this may be exclusively available as a - premium feature. + If 'true', adds punctuation to recognition result hypotheses. + This feature is only available in select languages. Setting + this for requests in other languages has no effect at all. The + default 'false' value does not add punctuation to result + hypotheses. Note: This is currently offered as an experimental + service, complimentary to all users. In the future this may be + exclusively available as a premium feature. enable_speaker_diarization: - *Optional* If 'true', enables speaker detection for each - recognized word in the top alternative of the recognition - result using a speaker\_tag provided in the WordInfo. Note: - Use diarization\_config instead. + If 'true', enables speaker detection for each recognized word + in the top alternative of the recognition result using a + speaker\_tag provided in the WordInfo. Note: Use + diarization\_config instead. diarization_speaker_count: - *Optional* If set, specifies the estimated number of speakers - in the conversation. Defaults to '2'. Ignored unless + If set, specifies the estimated number of speakers in the + conversation. Defaults to '2'. Ignored unless enable\_speaker\_diarization is set to true. Note: Use diarization\_config instead. diarization_config: - *Optional* Config to enable speaker diarization and set - additional parameters to make diarization better suited for - your application. Note: When this is enabled, we send all the - words from the beginning of the audio for the top alternative - in every consecutive STREAMING responses. This is done in - order to improve our speaker tags as our models learn to - identify the speakers in the conversation over time. For non- - streaming requests, the diarization results will be provided - only in the top alternative of the FINAL - SpeechRecognitionResult. 
+ Config to enable speaker diarization and set additional + parameters to make diarization better suited for your + application. Note: When this is enabled, we send all the words + from the beginning of the audio for the top alternative in + every consecutive STREAMING responses. This is done in order + to improve our speaker tags as our models learn to identify + the speakers in the conversation over time. For non-streaming + requests, the diarization results will be provided only in the + top alternative of the FINAL SpeechRecognitionResult. metadata: - *Optional* Metadata regarding this request. + Metadata regarding this request. model: - *Optional* Which model to select for the given request. Select - the model best suited to your domain to get best results. If a - model is not explicitly specified, then we auto-select a model - based on the parameters in the RecognitionConfig. .. raw:: - html .. raw:: html :: - .. - raw:: html .. raw:: html :: + Which model to select for the given request. Select the model + best suited to your domain to get best results. If a model is + not explicitly specified, then we auto-select a model based on + the parameters in the RecognitionConfig. .. raw:: html +
Model Description
.. raw:: html :: + .. raw:: html .. raw:: + html :: .. raw:: html .. raw:: html :: @@ -2284,13 +2282,13 @@ recorded at a 16khz or greater sampling rate. .. raw:: html .. raw:: html
ModelDescription
command_and_search Best for short queries such as voice commands or voice search.
use_enhanced: - *Optional* Set to true to use an enhanced model for speech - recognition. If ``use_enhanced`` is set to true and the - ``model`` field is not set, then an appropriate enhanced model - is chosen if an enhanced model exists for the audio. If - ``use_enhanced`` is true and an enhanced version of the - specified model does not exist, then the speech is recognized - using the standard version of the specified model. + Set to true to use an enhanced model for speech recognition. + If ``use_enhanced`` is set to true and the ``model`` field is + not set, then an appropriate enhanced model is chosen if an + enhanced model exists for the audio. If ``use_enhanced`` is + true and an enhanced version of the specified model does not + exist, then the speech is recognized using the standard + version of the specified model. """, # @@protoc_insertion_point(class_scope:google.cloud.speech.v1p1beta1.RecognitionConfig) ), @@ -2303,24 +2301,24 @@ dict( DESCRIPTOR=_SPEAKERDIARIZATIONCONFIG, __module__="google.cloud.speech_v1p1beta1.proto.cloud_speech_pb2", - __doc__="""*Optional* Config to enable speaker diarization. + __doc__="""Config to enable speaker diarization. Attributes: enable_speaker_diarization: - *Optional* If 'true', enables speaker detection for each - recognized word in the top alternative of the recognition - result using a speaker\_tag provided in the WordInfo. + If 'true', enables speaker detection for each recognized word + in the top alternative of the recognition result using a + speaker\_tag provided in the WordInfo. min_speaker_count: - *Optional* Minimum number of speakers in the conversation. - This range gives you more flexibility by allowing the system - to automatically determine the correct number of speakers. If - not set, the default value is 2. + Minimum number of speakers in the conversation. This range + gives you more flexibility by allowing the system to + automatically determine the correct number of speakers. 
If not + set, the default value is 2. max_speaker_count: - *Optional* Maximum number of speakers in the conversation. - This range gives you more flexibility by allowing the system - to automatically determine the correct number of speakers. If - not set, the default value is 6. + Maximum number of speakers in the conversation. This range + gives you more flexibility by allowing the system to + automatically determine the correct number of speakers. If not + set, the default value is 6. """, # @@protoc_insertion_point(class_scope:google.cloud.speech.v1p1beta1.SpeakerDiarizationConfig) ), @@ -2386,17 +2384,17 @@ Attributes: phrases: - *Optional* A list of strings containing words and phrases - "hints" so that the speech recognition is more likely to - recognize them. This can be used to improve the accuracy for - specific words and phrases, for example, if specific commands - are typically spoken by the user. This can also be used to add - additional words to the vocabulary of the recognizer. See - `usage limits `__. List items - can also be set to classes for groups of words that represent - common concepts that occur in natural language. For example, - rather than providing phrase hints for every month of the - year, using the $MONTH class improves the likelihood of + A list of strings containing words and phrases "hints" so that + the speech recognition is more likely to recognize them. This + can be used to improve the accuracy for specific words and + phrases, for example, if specific commands are typically + spoken by the user. This can also be used to add additional + words to the vocabulary of the recognizer. See `usage limits + `__. + List items can also be set to classes for groups of words that + represent common concepts that occur in natural language. For + example, rather than providing phrase hints for every month of + the year, using the $MONTH class improves the likelihood of correctly transcribing audio that includes months. boost: Hint Boost. 
Positive value will increase the probability that @@ -2425,7 +2423,8 @@ ``RecognitionConfig``. Either ``content`` or ``uri`` must be supplied. Supplying both or neither returns [google.rpc.Code.INVALID\_ARGUMENT][google.rpc.Code.INVALID\_ARGUMENT]. - See `content limits `__. + See `content + limits `__. Attributes: @@ -2434,8 +2433,8 @@ Cloud Storage uri. content: The audio data bytes encoded as specified in - ``RecognitionConfig``. Note: as with all bytes fields, - protobuffers use a pure binary representation, whereas JSON + ``RecognitionConfig``. Note: as with all bytes fields, proto + buffers use a pure binary representation, whereas JSON representations use base64. uri: URI that points to a file that contains audio data bytes as @@ -2466,8 +2465,8 @@ Attributes: results: - Output only. Sequential list of transcription results - corresponding to sequential portions of audio. + Sequential list of transcription results corresponding to + sequential portions of audio. """, # @@protoc_insertion_point(class_scope:google.cloud.speech.v1p1beta1.RecognizeResponse) ), @@ -2490,8 +2489,8 @@ Attributes: results: - Output only. Sequential list of transcription results - corresponding to sequential portions of audio. + Sequential list of transcription results corresponding to + sequential portions of audio. """, # @@protoc_insertion_point(class_scope:google.cloud.speech.v1p1beta1.LongRunningRecognizeResponse) ), @@ -2584,17 +2583,16 @@ Attributes: error: - Output only. If set, returns a - [google.rpc.Status][google.rpc.Status] message that specifies - the error for the operation. + If set, returns a [google.rpc.Status][google.rpc.Status] + message that specifies the error for the operation. results: - Output only. This repeated list contains zero or more results - that correspond to consecutive portions of the audio currently + This repeated list contains zero or more results that + correspond to consecutive portions of the audio currently being processed. 
It contains zero or one ``is_final=true`` result (the newly settled portion), followed by zero or more ``is_final=false`` results (the interim results). speech_event_type: - Output only. Indicates the type of speech event. + Indicates the type of speech event. """, # @@protoc_insertion_point(class_scope:google.cloud.speech.v1p1beta1.StreamingRecognizeResponse) ), @@ -2613,38 +2611,38 @@ Attributes: alternatives: - Output only. May contain one or more recognition hypotheses - (up to the maximum specified in ``max_alternatives``). These - alternatives are ordered in terms of accuracy, with the top - (first) alternative being the most probable, as ranked by the + May contain one or more recognition hypotheses (up to the + maximum specified in ``max_alternatives``). These alternatives + are ordered in terms of accuracy, with the top (first) + alternative being the most probable, as ranked by the recognizer. is_final: - Output only. If ``false``, this ``StreamingRecognitionResult`` - represents an interim result that may change. If ``true``, - this is the final time the speech service will return this - particular ``StreamingRecognitionResult``, the recognizer will - not return any further hypotheses for this portion of the - transcript and corresponding audio. + If ``false``, this ``StreamingRecognitionResult`` represents + an interim result that may change. If ``true``, this is the + final time the speech service will return this particular + ``StreamingRecognitionResult``, the recognizer will not return + any further hypotheses for this portion of the transcript and + corresponding audio. stability: - Output only. An estimate of the likelihood that the recognizer - will not change its guess about this interim result. Values - range from 0.0 (completely unstable) to 1.0 (completely - stable). This field is only provided for interim results + An estimate of the likelihood that the recognizer will not + change its guess about this interim result. 
Values range from + 0.0 (completely unstable) to 1.0 (completely stable). This + field is only provided for interim results (``is_final=false``). The default of 0.0 is a sentinel value indicating ``stability`` was not set. result_end_time: - Output only. Time offset of the end of this result relative to - the beginning of the audio. + Time offset of the end of this result relative to the + beginning of the audio. channel_tag: For multi-channel audio, this is the channel number corresponding to the recognized result for the audio from that channel. For audio\_channel\_count = N, its output values can range from '1' to 'N'. language_code: - Output only. The `BCP-47 `__ language tag of the language - in this result. This language code was detected to have the - most likelihood of being spoken in the audio. + The `BCP-47 `__ + language tag of the language in this result. This language + code was detected to have the most likelihood of being spoken + in the audio. """, # @@protoc_insertion_point(class_scope:google.cloud.speech.v1p1beta1.StreamingRecognitionResult) ), @@ -2662,10 +2660,10 @@ Attributes: alternatives: - Output only. May contain one or more recognition hypotheses - (up to the maximum specified in ``max_alternatives``). These - alternatives are ordered in terms of accuracy, with the top - (first) alternative being the most probable, as ranked by the + May contain one or more recognition hypotheses (up to the + maximum specified in ``max_alternatives``). These alternatives + are ordered in terms of accuracy, with the top (first) + alternative being the most probable, as ranked by the recognizer. channel_tag: For multi-channel audio, this is the channel number @@ -2673,10 +2671,10 @@ channel. For audio\_channel\_count = N, its output values can range from '1' to 'N'. language_code: - Output only. The `BCP-47 `__ language tag of the language - in this result. This language code was detected to have the - most likelihood of being spoken in the audio. 
+ The `BCP-47 `__ + language tag of the language in this result. This language + code was detected to have the most likelihood of being spoken + in the audio. """, # @@protoc_insertion_point(class_scope:google.cloud.speech.v1p1beta1.SpeechRecognitionResult) ), @@ -2694,22 +2692,20 @@ Attributes: transcript: - Output only. Transcript text representing the words that the - user spoke. + Transcript text representing the words that the user spoke. confidence: - Output only. The confidence estimate between 0.0 and 1.0. A - higher number indicates an estimated greater likelihood that - the recognized words are correct. This field is set only for - the top alternative of a non-streaming result or, of a - streaming result where ``is_final=true``. This field is not - guaranteed to be accurate and users should not rely on it to - be always provided. The default of 0.0 is a sentinel value - indicating ``confidence`` was not set. + The confidence estimate between 0.0 and 1.0. A higher number + indicates an estimated greater likelihood that the recognized + words are correct. This field is set only for the top + alternative of a non-streaming result or, of a streaming + result where ``is_final=true``. This field is not guaranteed + to be accurate and users should not rely on it to be always + provided. The default of 0.0 is a sentinel value indicating + ``confidence`` was not set. words: - Output only. A list of word-specific information for each - recognized word. Note: When ``enable_speaker_diarization`` is - true, you will see all the words from the beginning of the - audio. + A list of word-specific information for each recognized word. + Note: When ``enable_speaker_diarization`` is true, you will + see all the words from the beginning of the audio. """, # @@protoc_insertion_point(class_scope:google.cloud.speech.v1p1beta1.SpeechRecognitionAlternative) ), @@ -2727,36 +2723,35 @@ Attributes: start_time: - Output only. 
Time offset relative to the beginning of the - audio, and corresponding to the start of the spoken word. This - field is only set if ``enable_word_time_offsets=true`` and - only in the top hypothesis. This is an experimental feature - and the accuracy of the time offset can vary. + Time offset relative to the beginning of the audio, and + corresponding to the start of the spoken word. This field is + only set if ``enable_word_time_offsets=true`` and only in the + top hypothesis. This is an experimental feature and the + accuracy of the time offset can vary. end_time: - Output only. Time offset relative to the beginning of the - audio, and corresponding to the end of the spoken word. This - field is only set if ``enable_word_time_offsets=true`` and - only in the top hypothesis. This is an experimental feature - and the accuracy of the time offset can vary. + Time offset relative to the beginning of the audio, and + corresponding to the end of the spoken word. This field is + only set if ``enable_word_time_offsets=true`` and only in the + top hypothesis. This is an experimental feature and the + accuracy of the time offset can vary. word: - Output only. The word corresponding to this set of - information. + The word corresponding to this set of information. confidence: - Output only. The confidence estimate between 0.0 and 1.0. A - higher number indicates an estimated greater likelihood that - the recognized words are correct. This field is set only for - the top alternative of a non-streaming result or, of a - streaming result where ``is_final=true``. This field is not - guaranteed to be accurate and users should not rely on it to - be always provided. The default of 0.0 is a sentinel value - indicating ``confidence`` was not set. + The confidence estimate between 0.0 and 1.0. A higher number + indicates an estimated greater likelihood that the recognized + words are correct. 
This field is set only for the top + alternative of a non-streaming result or, of a streaming + result where ``is_final=true``. This field is not guaranteed + to be accurate and users should not rely on it to be always + provided. The default of 0.0 is a sentinel value indicating + ``confidence`` was not set. speaker_tag: - Output only. A distinct integer value is assigned for every - speaker within the audio. This field specifies which one of - those speakers was detected to have spoken this word. Value - ranges from '1' to diarization\_speaker\_count. speaker\_tag - is set if enable\_speaker\_diarization = 'true' and only in - the top alternative. + A distinct integer value is assigned for every speaker within + the audio. This field specifies which one of those speakers + was detected to have spoken this word. Value ranges from '1' + to diarization\_speaker\_count. speaker\_tag is set if + enable\_speaker\_diarization = 'true' and only in the top + alternative. """, # @@protoc_insertion_point(class_scope:google.cloud.speech.v1p1beta1.WordInfo) ), @@ -2765,17 +2760,26 @@ DESCRIPTOR._options = None +_RECOGNIZEREQUEST.fields_by_name["config"]._options = None +_RECOGNIZEREQUEST.fields_by_name["audio"]._options = None +_LONGRUNNINGRECOGNIZEREQUEST.fields_by_name["config"]._options = None +_LONGRUNNINGRECOGNIZEREQUEST.fields_by_name["audio"]._options = None +_STREAMINGRECOGNITIONCONFIG.fields_by_name["config"]._options = None +_RECOGNITIONCONFIG.fields_by_name["language_code"]._options = None _RECOGNITIONCONFIG.fields_by_name["enable_speaker_diarization"]._options = None _RECOGNITIONCONFIG.fields_by_name["diarization_speaker_count"]._options = None +_RECOGNITIONMETADATA.fields_by_name["obfuscated_id"]._options = None _SPEECH = _descriptor.ServiceDescriptor( name="Speech", full_name="google.cloud.speech.v1p1beta1.Speech", file=DESCRIPTOR, index=0, - serialized_options=None, - serialized_start=4563, - serialized_end=5037, + serialized_options=_b( + 
"\312A\025speech.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" + ), + serialized_start=4626, + serialized_end=5268, methods=[ _descriptor.MethodDescriptor( name="Recognize", @@ -2785,7 +2789,7 @@ input_type=_RECOGNIZEREQUEST, output_type=_RECOGNIZERESPONSE, serialized_options=_b( - '\202\323\344\223\002 "\033/v1p1beta1/speech:recognize:\001*' + '\202\323\344\223\002 "\033/v1p1beta1/speech:recognize:\001*\332A\014config,audio' ), ), _descriptor.MethodDescriptor( @@ -2796,7 +2800,7 @@ input_type=_LONGRUNNINGRECOGNIZEREQUEST, output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, serialized_options=_b( - '\202\323\344\223\002+"&/v1p1beta1/speech:longrunningrecognize:\001*' + '\202\323\344\223\002+"&/v1p1beta1/speech:longrunningrecognize:\001*\332A\014config,audio\312A<\n\034LongRunningRecognizeResponse\022\034LongRunningRecognizeMetadata' ), ), _descriptor.MethodDescriptor( diff --git a/speech/google/cloud/speech_v1p1beta1/proto/cloud_speech_pb2_grpc.py b/speech/google/cloud/speech_v1p1beta1/proto/cloud_speech_pb2_grpc.py index 750e4b781acc..ba4950fd256e 100644 --- a/speech/google/cloud/speech_v1p1beta1/proto/cloud_speech_pb2_grpc.py +++ b/speech/google/cloud/speech_v1p1beta1/proto/cloud_speech_pb2_grpc.py @@ -53,6 +53,8 @@ def LongRunningRecognize(self, request, context): google.longrunning.Operations interface. Returns either an `Operation.error` or an `Operation.response` which contains a `LongRunningRecognizeResponse` message. + For more information on asynchronous speech recognition, see the + [how-to](https://cloud.google.com/speech-to-text/docs/async-recognize). 
""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") diff --git a/speech/google/cloud/speech_v1p1beta1/types.py b/speech/google/cloud/speech_v1p1beta1/types.py index f7415a2b9c1c..73a330eafa38 100644 --- a/speech/google/cloud/speech_v1p1beta1/types.py +++ b/speech/google/cloud/speech_v1p1beta1/types.py @@ -24,10 +24,11 @@ from google.longrunning import operations_pb2 from google.protobuf import any_pb2 from google.protobuf import duration_pb2 +from google.protobuf import timestamp_pb2 from google.rpc import status_pb2 -_shared_modules = [operations_pb2, any_pb2, duration_pb2, status_pb2] +_shared_modules = [operations_pb2, any_pb2, duration_pb2, timestamp_pb2, status_pb2] _local_modules = [cloud_speech_pb2] diff --git a/speech/synth.metadata b/speech/synth.metadata index da81eb6a3ae6..560f9d4db5a8 100644 --- a/speech/synth.metadata +++ b/speech/synth.metadata @@ -1,18 +1,26 @@ { - "updateTime": "2019-08-29T22:41:20.931044Z", + "updateTime": "2019-10-31T12:31:55.844532Z", "sources": [ { "generator": { "name": "artman", - "version": "0.35.1", - "dockerImage": "googleapis/artman@sha256:b11c7ea0d0831c54016fb50f4b796d24d1971439b30fbc32a369ba1ac887c384" + "version": "0.41.0", + "dockerImage": "googleapis/artman@sha256:75b38a3b073a7b243545f2332463096624c802bb1e56b8cb6f22ba1ecd325fa9" + } + }, + { + "git": { + "name": "googleapis", + "remote": "https://github.com/googleapis/googleapis.git", + "sha": "c0e494ca955a4fdd9ad460a5890a354ec3a3a0ff", + "internalRef": "277673798" } }, { "template": { "name": "python_library", "origin": "synthtool.gcp", - "version": "2019.5.2" + "version": "2019.10.17" } } ], diff --git a/speech/synth.py b/speech/synth.py index dba406260300..c812ed261ef2 100644 --- a/speech/synth.py +++ b/speech/synth.py @@ -61,6 +61,6 @@ # Add templated files # ---------------------------------------------------------------------------- templated_files = common.py_library(unit_cov_level=97, cov_level=100) 
-s.move(templated_files) +s.move(templated_files, excludes=["noxfile.py"]) s.shell.run(["nox", "-s", "blacken"], hide_output=False) diff --git a/speech/tests/system/gapic/v1/test_system_speech_v1.py b/speech/tests/system/gapic/v1/test_system_speech_v1.py index 080e431ad81d..1c30e3fac6b9 100644 --- a/speech/tests/system/gapic/v1/test_system_speech_v1.py +++ b/speech/tests/system/gapic/v1/test_system_speech_v1.py @@ -12,25 +12,83 @@ # See the License for the specific language governing permissions and # limitations under the License. -import time +import os +import io +import requests from google.cloud import speech_v1 -from google.cloud.speech_v1 import enums -from google.cloud.speech_v1.proto import cloud_speech_pb2 class TestSystemSpeech(object): def test_recognize(self): + try: + BUCKET = os.environ["GOOGLE_CLOUD_TESTS_SPEECH_BUCKET"] + except KeyError: + BUCKET = "cloud-samples-tests" + client = speech_v1.SpeechClient() - language_code = "en-US" - sample_rate_hertz = 44100 - encoding = enums.RecognitionConfig.AudioEncoding.FLAC + config = { - "language_code": language_code, - "sample_rate_hertz": sample_rate_hertz, - "encoding": encoding, + "encoding": speech_v1.enums.RecognitionConfig.AudioEncoding.FLAC, + "language_code": "en-US", + "sample_rate_hertz": 16000, } - uri = "gs://gapic-toolkit/hello.flac" + + uri = "gs://{}/speech/brooklyn.flac".format(BUCKET) audio = {"uri": uri} + response = client.recognize(config, audio) + + assert response.results[0].alternatives[0].transcript is not None + + def test_long_running_recognize(self): + + try: + BUCKET = os.environ["GOOGLE_CLOUD_TESTS_SPEECH_BUCKET"] + except KeyError: + BUCKET = "cloud-samples-tests" + + client = speech_v1.SpeechClient() + + config = speech_v1.types.RecognitionConfig( + encoding=speech_v1.enums.RecognitionConfig.AudioEncoding.FLAC, + language_code="en-US", + sample_rate_hertz=16000, + ) + + uri = "gs://{}/speech/brooklyn.flac".format(BUCKET) + audio = {"uri": uri} + + response = 
client.long_running_recognize(config, audio) + + assert response.result() is not None + + def test_streaming_recognize(self): + + try: + BUCKET = os.environ["GOOGLE_CLOUD_TESTS_SPEECH_BUCKET"] + except KeyError: + BUCKET = "cloud-samples-tests" + + client = speech_v1.SpeechClient() + + config = speech_v1.types.RecognitionConfig( + encoding=speech_v1.enums.RecognitionConfig.AudioEncoding.FLAC, + language_code="en-US", + sample_rate_hertz=16000, + ) + streamingConfig = speech_v1.types.StreamingRecognitionConfig(config=config) + + uri = "https://storage.googleapis.com/{}/speech/brooklyn.flac".format(BUCKET) + streaming_requests = [ + speech_v1.types.StreamingRecognizeRequest( + audio_content=requests.get(uri).content + ) + ] + + responses = client.streaming_recognize(streamingConfig, streaming_requests) + + for response in responses: + for result in response.results: + assert result.alternatives[0].transcript is not None diff --git a/speech/tests/system/gapic/v1p1beta1/test_system_speech_v1p1beta1.py b/speech/tests/system/gapic/v1p1beta1/test_system_speech_v1p1beta1.py index 36636c3e2479..36514089d364 100644 --- a/speech/tests/system/gapic/v1p1beta1/test_system_speech_v1p1beta1.py +++ b/speech/tests/system/gapic/v1p1beta1/test_system_speech_v1p1beta1.py @@ -12,25 +12,85 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import time +import os +import io +import requests from google.cloud import speech_v1p1beta1 -from google.cloud.speech_v1p1beta1 import enums -from google.cloud.speech_v1p1beta1.proto import cloud_speech_pb2 class TestSystemSpeech(object): def test_recognize(self): + try: + BUCKET = os.environ["GOOGLE_CLOUD_TESTS_SPEECH_BUCKET"] + except KeyError: + BUCKET = "cloud-samples-tests" + client = speech_v1p1beta1.SpeechClient() - language_code = "en-US" - sample_rate_hertz = 44100 - encoding = enums.RecognitionConfig.AudioEncoding.FLAC + config = { - "language_code": language_code, - "sample_rate_hertz": sample_rate_hertz, - "encoding": encoding, + "encoding": speech_v1p1beta1.enums.RecognitionConfig.AudioEncoding.FLAC, + "language_code": "en-US", + "sample_rate_hertz": 16000, } - uri = "gs://gapic-toolkit/hello.flac" + + uri = "gs://{}/speech/brooklyn.flac".format(BUCKET) audio = {"uri": uri} + response = client.recognize(config, audio) + + assert response.results[0].alternatives[0].transcript is not None + + def test_long_running_recognize(self): + + try: + BUCKET = os.environ["GOOGLE_CLOUD_TESTS_SPEECH_BUCKET"] + except KeyError: + BUCKET = "cloud-samples-tests" + + client = speech_v1p1beta1.SpeechClient() + + config = speech_v1p1beta1.types.RecognitionConfig( + encoding=speech_v1p1beta1.enums.RecognitionConfig.AudioEncoding.FLAC, + language_code="en-US", + sample_rate_hertz=16000, + ) + + uri = "gs://{}/speech/brooklyn.flac".format(BUCKET) + audio = {"uri": uri} + + response = client.long_running_recognize(config, audio) + + assert response.result() is not None + + def test_streaming_recognize(self): + + try: + BUCKET = os.environ["GOOGLE_CLOUD_TESTS_SPEECH_BUCKET"] + except KeyError: + BUCKET = "cloud-samples-tests" + + client = speech_v1p1beta1.SpeechClient() + + config = speech_v1p1beta1.types.RecognitionConfig( + encoding=speech_v1p1beta1.enums.RecognitionConfig.AudioEncoding.FLAC, + language_code="en-US", + sample_rate_hertz=16000, + ) + streamingConfig = 
speech_v1p1beta1.types.StreamingRecognitionConfig( + config=config + ) + + uri = "https://storage.googleapis.com/{}/speech/brooklyn.flac".format(BUCKET) + streaming_requests = [ + speech_v1p1beta1.types.StreamingRecognizeRequest( + audio_content=requests.get(uri).content + ) + ] + + responses = client.streaming_recognize(streamingConfig, streaming_requests) + + for response in responses: + for result in response.results: + assert result.alternatives[0].transcript is not None diff --git a/storage/CHANGELOG.md b/storage/CHANGELOG.md index e0220b603712..55169d4ed82e 100644 --- a/storage/CHANGELOG.md +++ b/storage/CHANGELOG.md @@ -4,6 +4,54 @@ [1]: https://pypi.org/project/google-cloud-storage/#history +## 1.22.0 + +11-05-2019 10:22 PST + + +### New Features +- Add UBLA attrs to IAMConfiguration. ([#9475](https://github.com/googleapis/google-cloud-python/pull/9475)) + +## 1.21.0 + +10-28-2019 21:52 PDT + +### Implementation Changes +- Add gcloud-python header to user agent ([#9551](https://github.com/googleapis/google-cloud-python/pull/9551)) +- Don't report a gapic version for storage ([#9549](https://github.com/googleapis/google-cloud-python/pull/9549)) +- Update storage endpoint from www.googleapis.com to storage.googleapis.com ([#9543](https://github.com/googleapis/google-cloud-python/pull/9543)) +- Call anonymous client method to remove dependency of google application credentials ([#9455](https://github.com/googleapis/google-cloud-python/pull/9455)) +- Enable CSEK w/ V4 signed URLs ([#9450](https://github.com/googleapis/google-cloud-python/pull/9450)) + +### New Features +- Support predefined ACLs in `Bucket.create` ([#9334](https://github.com/googleapis/google-cloud-python/pull/9334)) + +### Documentation +- Add `hmac_key` and notification documentation rst files ([#9529](https://github.com/googleapis/google-cloud-python/pull/9529)) +- Remove references to the old authentication credentials ([#9456](https://github.com/googleapis/google-cloud-python/pull/9456)) 
+- Clarify docstring for `Blob.download_as_string` ([#9332](https://github.com/googleapis/google-cloud-python/pull/9332)) + +## 1.20.0 + +09-26-2019 06:45 PDT + + +### New Features +- Add `user_project` param to HMAC-related methods. ([#9237](https://github.com/googleapis/google-cloud-python/pull/9237)) +- Add `Blob.from_string` and `Bucket.from_string` factories. ([#9143](https://github.com/googleapis/google-cloud-python/pull/9143)) + +### Documentation +- Fix intersphinx reference to `requests`. ([#9294](https://github.com/googleapis/google-cloud-python/pull/9294)) +- Fix deep / broken URL for service account setup. ([#9164](https://github.com/googleapis/google-cloud-python/pull/9164)) + +### Internal / Testing Changes +- Fix typo in `_helpers.py`. ([#9239](https://github.com/googleapis/google-cloud-python/pull/9239)) +- In systests, retry bucket creation on 503. ([#9248](https://github.com/googleapis/google-cloud-python/pull/9248)) +- Avoid using `REGIONAL` / `MULTI_REGIONAL` in examples, tests. ([#9205](https://github.com/googleapis/google-cloud-python/pull/9205)) +- Move `benchwrapper` into `tests/perf`. ([#9246](https://github.com/googleapis/google-cloud-python/pull/9246)) +- Add support for `STORAGE_EMULATOR_HOST`; add `benchwrapper` script. 
([#9219](https://github.com/googleapis/google-cloud-python/pull/9219)) + + ## 1.19.0 08-28-2019 09:45 PDT diff --git a/storage/docs/conf.py b/storage/docs/conf.py index 7f5c6b412f36..a4acae246f15 100644 --- a/storage/docs/conf.py +++ b/storage/docs/conf.py @@ -342,7 +342,7 @@ "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://2.python-requests.org/en/master/", None), + "requests": ("https://requests.kennethreitz.org/en/stable/", None), } diff --git a/storage/docs/constants.rst b/storage/docs/constants.rst new file mode 100644 index 000000000000..ddf5b81f29a7 --- /dev/null +++ b/storage/docs/constants.rst @@ -0,0 +1,7 @@ +Constants +~~~~~~~~~ + +.. automodule:: google.cloud.storage.constants + :members: + :member-order: bysource + diff --git a/storage/docs/hmac_key.rst b/storage/docs/hmac_key.rst new file mode 100644 index 000000000000..432be5f64ebe --- /dev/null +++ b/storage/docs/hmac_key.rst @@ -0,0 +1,6 @@ +HMAC Key Metadata +~~~~~~~~~~~~~~~~~ + +.. automodule:: google.cloud.storage.hmac_key + :members: + :show-inheritance: diff --git a/storage/docs/index.rst b/storage/docs/index.rst index 3257eca3fc3f..7a74f12cdf7c 100644 --- a/storage/docs/index.rst +++ b/storage/docs/index.rst @@ -18,6 +18,9 @@ API Reference buckets acl batch + constants + hmac_key + notification Changelog --------- diff --git a/storage/docs/notification.rst b/storage/docs/notification.rst new file mode 100644 index 000000000000..cdb381d2f703 --- /dev/null +++ b/storage/docs/notification.rst @@ -0,0 +1,6 @@ +Notification +~~~~~~~~~~~~ + +.. 
automodule:: google.cloud.storage.notification + :members: + :show-inheritance: diff --git a/storage/docs/snippets.py b/storage/docs/snippets.py index ed16c5279e30..8171d5cf80f1 100644 --- a/storage/docs/snippets.py +++ b/storage/docs/snippets.py @@ -39,7 +39,7 @@ def storage_get_started(client, to_delete): bucket = client.get_bucket("bucket-id-here") # Then do other things... blob = bucket.get_blob("/remote/path/to/file.txt") - assert blob.download_as_string() == "My old contents!" + assert blob.download_as_string() == b"My old contents!" blob.upload_from_string("New contents!") blob2 = bucket.blob("/remote/path/storage.txt") blob2.upload_from_filename(filename="/local/path.txt") diff --git a/storage/google/cloud/storage/_helpers.py b/storage/google/cloud/storage/_helpers.py index 93848daa1cde..5bfa13e313ea 100644 --- a/storage/google/cloud/storage/_helpers.py +++ b/storage/google/cloud/storage/_helpers.py @@ -19,6 +19,16 @@ import base64 from hashlib import md5 +import os + +STORAGE_EMULATOR_ENV_VAR = "STORAGE_EMULATOR_HOST" +"""Environment variable defining host for Storage emulator.""" + +_DEFAULT_STORAGE_HOST = u"https://storage.googleapis.com" + + +def _get_storage_host(): + return os.environ.get(STORAGE_EMULATOR_ENV_VAR, _DEFAULT_STORAGE_HOST) def _validate_name(name): @@ -33,7 +43,7 @@ def _validate_name(name): if name is None: return - # The first and las characters must be alphanumeric. + # The first and last characters must be alphanumeric. if not all([name[0].isalnum(), name[-1].isalnum()]): raise ValueError("Bucket names must start and end with a number or letter.") return name diff --git a/storage/google/cloud/storage/_http.py b/storage/google/cloud/storage/_http.py index 14680c9b721b..032f70e02185 100644 --- a/storage/google/cloud/storage/_http.py +++ b/storage/google/cloud/storage/_http.py @@ -29,14 +29,18 @@ class Connection(_http.JSONConnection): :param client_info: (Optional) instance used to generate user agent. 
""" - DEFAULT_API_ENDPOINT = _http.API_BASE_URL + DEFAULT_API_ENDPOINT = "https://storage.googleapis.com" def __init__(self, client, client_info=None, api_endpoint=DEFAULT_API_ENDPOINT): super(Connection, self).__init__(client, client_info) self.API_BASE_URL = api_endpoint - self._client_info.gapic_version = __version__ self._client_info.client_library_version = __version__ + # TODO: When metrics all use gccl, this should be removed #9552 + if self._client_info.user_agent is None: # pragma: no branch + self._client_info.user_agent = "" + self._client_info.user_agent += " gcloud-python/{} ".format(__version__) + API_VERSION = "v1" """The version of the API, used in building the API call's URL.""" diff --git a/storage/google/cloud/storage/blob.py b/storage/google/cloud/storage/blob.py index 5b8f217d2ec8..20c15e2d7542 100644 --- a/storage/google/cloud/storage/blob.py +++ b/storage/google/cloud/storage/blob.py @@ -43,31 +43,36 @@ from google import resumable_media from google.resumable_media.requests import ChunkedDownload from google.resumable_media.requests import Download +from google.resumable_media.requests import RawDownload +from google.resumable_media.requests import RawChunkedDownload from google.resumable_media.requests import MultipartUpload from google.resumable_media.requests import ResumableUpload +from google.api_core.iam import Policy from google.cloud import exceptions +from google.cloud._helpers import _bytes_to_unicode from google.cloud._helpers import _rfc3339_to_datetime from google.cloud._helpers import _to_bytes -from google.cloud._helpers import _bytes_to_unicode from google.cloud.exceptions import NotFound -from google.api_core.iam import Policy +from google.cloud.storage._helpers import _get_storage_host from google.cloud.storage._helpers import _PropertyMixin from google.cloud.storage._helpers import _scalar_property from google.cloud.storage._signing import generate_signed_url_v2 from google.cloud.storage._signing import 
generate_signed_url_v4 from google.cloud.storage.acl import ACL from google.cloud.storage.acl import ObjectACL +from google.cloud.storage.constants import STANDARD_STORAGE_CLASS +from google.cloud.storage.constants import NEARLINE_STORAGE_CLASS +from google.cloud.storage.constants import COLDLINE_STORAGE_CLASS +from google.cloud.storage.constants import MULTI_REGIONAL_LEGACY_STORAGE_CLASS +from google.cloud.storage.constants import REGIONAL_LEGACY_STORAGE_CLASS +_STORAGE_HOST = _get_storage_host() _API_ACCESS_ENDPOINT = "https://storage.googleapis.com" _DEFAULT_CONTENT_TYPE = u"application/octet-stream" -_DOWNLOAD_URL_TEMPLATE = ( - u"https://www.googleapis.com/download/storage/v1{path}?alt=media" -) -_BASE_UPLOAD_TEMPLATE = ( - u"https://www.googleapis.com/upload/storage/v1{bucket_path}/o?uploadType=" -) +_DOWNLOAD_URL_TEMPLATE = _STORAGE_HOST + u"/download/storage/v1{path}?alt=media" +_BASE_UPLOAD_TEMPLATE = _STORAGE_HOST + u"/upload/storage/v1{bucket_path}/o?uploadType=" _MULTIPART_URL_TEMPLATE = _BASE_UPLOAD_TEMPLATE + u"multipart" _RESUMABLE_URL_TEMPLATE = _BASE_UPLOAD_TEMPLATE + u"resumable" # NOTE: "acl" is also writeable but we defer ACL management to @@ -136,12 +141,12 @@ class Blob(_PropertyMixin): _CHUNK_SIZE_MULTIPLE = 256 * 1024 """Number (256 KB, in bytes) that must divide the chunk size.""" - _STORAGE_CLASSES = ( - "NEARLINE", - "MULTI_REGIONAL", - "REGIONAL", - "COLDLINE", - "STANDARD", # alias for MULTI_REGIONAL/REGIONAL, based on location + STORAGE_CLASSES = ( + STANDARD_STORAGE_CLASS, + NEARLINE_STORAGE_CLASS, + COLDLINE_STORAGE_CLASS, + MULTI_REGIONAL_LEGACY_STORAGE_CLASS, + REGIONAL_LEGACY_STORAGE_CLASS, ) """Allowed values for :attr:`storage_class`. @@ -152,11 +157,6 @@ class Blob(_PropertyMixin): .. note:: This list does not include 'DURABLE_REDUCED_AVAILABILITY', which is only documented for buckets (and deprecated). - - .. 
note:: - The documentation does *not* mention 'STANDARD', but it is the value - assigned by the back-end for objects created in buckets with 'STANDARD' - set as their 'storage_class'. """ def __init__( @@ -432,11 +432,12 @@ def generate_signed_url( to the ``client`` stored on the blob's bucket. - :type credentials: :class:`oauth2client.client.OAuth2Credentials` or + :type credentials: :class:`google.auth.credentials.Credentials` or :class:`NoneType` - :param credentials: (Optional) The OAuth2 credentials to use to sign - the URL. Defaults to the credentials stored on the - client used. + :param credentials: The authorization credentials to attach to requests. + These credentials identify this application to the service. + If none are specified, the client will attempt to ascertain + the credentials from the environment. :type version: str :param version: (Optional) The version of signed credential to create. @@ -470,6 +471,17 @@ def generate_signed_url( else: helper = generate_signed_url_v4 + if self._encryption_key is not None: + encryption_headers = _get_encryption_headers(self._encryption_key) + if headers is None: + headers = {} + if version == "v2": + # See: https://cloud.google.com/storage/docs/access-control/signed-urls-v2#about-canonical-extension-headers + v2_copy_only = "X-Goog-Encryption-Algorithm" + headers[v2_copy_only] = encryption_headers[v2_copy_only] + else: + headers.update(encryption_headers) + return helper( credentials, resource=resource, @@ -581,7 +593,14 @@ def _get_download_url(self): return _add_query_parameters(base_url, name_value_pairs) def _do_download( - self, transport, file_obj, download_url, headers, start=None, end=None + self, + transport, + file_obj, + download_url, + headers, + start=None, + end=None, + raw_download=False, ): """Perform a download without any error handling. @@ -607,14 +626,30 @@ def _do_download( :type end: int :param end: Optional, The last byte in a range to be downloaded. 
+ + :type raw_download: bool + :param raw_download: + Optional, If true, download the object without any expansion. """ if self.chunk_size is None: - download = Download( + if raw_download: + klass = RawDownload + else: + klass = Download + + download = klass( download_url, stream=file_obj, headers=headers, start=start, end=end ) download.consume(transport) + else: - download = ChunkedDownload( + + if raw_download: + klass = RawChunkedDownload + else: + klass = ChunkedDownload + + download = klass( download_url, self.chunk_size, file_obj, @@ -626,7 +661,9 @@ def _do_download( while not download.finished: download.consume_next_chunk(transport) - def download_to_file(self, file_obj, client=None, start=None, end=None): + def download_to_file( + self, file_obj, client=None, start=None, end=None, raw_download=False + ): """Download the contents of this blob into a file-like object. .. note:: @@ -666,6 +703,10 @@ def download_to_file(self, file_obj, client=None, start=None, end=None): :type end: int :param end: Optional, The last byte in a range to be downloaded. + :type raw_download: bool + :param raw_download: + Optional, If true, download the object without any expansion. + :raises: :class:`google.cloud.exceptions.NotFound` """ download_url = self._get_download_url() @@ -674,11 +715,15 @@ def download_to_file(self, file_obj, client=None, start=None, end=None): transport = self._get_transport(client) try: - self._do_download(transport, file_obj, download_url, headers, start, end) + self._do_download( + transport, file_obj, download_url, headers, start, end, raw_download + ) except resumable_media.InvalidResponse as exc: _raise_from_invalid_response(exc) - def download_to_filename(self, filename, client=None, start=None, end=None): + def download_to_filename( + self, filename, client=None, start=None, end=None, raw_download=False + ): """Download the contents of this blob into a named file. 
If :attr:`user_project` is set on the bucket, bills the API request @@ -698,11 +743,21 @@ def download_to_filename(self, filename, client=None, start=None, end=None): :type end: int :param end: Optional, The last byte in a range to be downloaded. + :type raw_download: bool + :param raw_download: + Optional, If true, download the object without any expansion. + :raises: :class:`google.cloud.exceptions.NotFound` """ try: with open(filename, "wb") as file_obj: - self.download_to_file(file_obj, client=client, start=start, end=end) + self.download_to_file( + file_obj, + client=client, + start=start, + end=end, + raw_download=raw_download, + ) except resumable_media.DataCorruption: # Delete the corrupt downloaded file. os.remove(filename) @@ -713,8 +768,8 @@ def download_to_filename(self, filename, client=None, start=None, end=None): mtime = time.mktime(updated.timetuple()) os.utime(file_obj.name, (mtime, mtime)) - def download_as_string(self, client=None, start=None, end=None): - """Download the contents of this blob as a string. + def download_as_string(self, client=None, start=None, end=None, raw_download=False): + """Download the contents of this blob as a bytes object. If :attr:`user_project` is set on the bucket, bills the API request to that project. @@ -730,12 +785,22 @@ def download_as_string(self, client=None, start=None, end=None): :type end: int :param end: Optional, The last byte in a range to be downloaded. + :type raw_download: bool + :param raw_download: + Optional, If true, download the object without any expansion. + :rtype: bytes :returns: The data stored in this blob. 
:raises: :class:`google.cloud.exceptions.NotFound` """ string_buffer = BytesIO() - self.download_to_file(string_buffer, client=client, start=start, end=end) + self.download_to_file( + string_buffer, + client=client, + start=start, + end=end, + raw_download=raw_download, + ) return string_buffer.getvalue() def _get_content_type(self, content_type, filename=None): @@ -1642,13 +1707,20 @@ def update_storage_class(self, new_class, client=None): to that project. :type new_class: str - :param new_class: new storage class for the object + :param new_class: + new storage class for the object. One of: + :attr:`~google.cloud.storage.constants.NEARLINE_STORAGE_CLASS`, + :attr:`~google.cloud.storage.constants.COLDLINE_STORAGE_CLASS`, + :attr:`~google.cloud.storage.constants.STANDARD_STORAGE_CLASS`, + :attr:`~google.cloud.storage.constants.MULTI_REGIONAL_LEGACY_STORAGE_CLASS`, + or + :attr:`~google.cloud.storage.constants.REGIONAL_LEGACY_STORAGE_CLASS`. :type client: :class:`~google.cloud.storage.client.Client` :param client: Optional. The client to use. If not passed, falls back to the ``client`` stored on the blob's bucket. """ - if new_class not in self._STORAGE_CLASSES: + if new_class not in self.STORAGE_CLASSES: raise ValueError("Invalid storage class: %s" % (new_class,)) # Update current blob's storage class prior to rewrite @@ -1929,9 +2001,15 @@ def kms_key_name(self): See https://cloud.google.com/storage/docs/storage-classes :rtype: str or ``NoneType`` - :returns: If set, one of "MULTI_REGIONAL", "REGIONAL", - "NEARLINE", "COLDLINE", "STANDARD", or - "DURABLE_REDUCED_AVAILABILITY", else ``None``. 
+ :returns: + If set, one of + :attr:`~google.cloud.storage.constants.STANDARD_STORAGE_CLASS`, + :attr:`~google.cloud.storage.constants.NEARLINE_STORAGE_CLASS`, + :attr:`~google.cloud.storage.constants.COLDLINE_STORAGE_CLASS`, + :attr:`~google.cloud.storage.constants.MULTI_REGIONAL_LEGACY_STORAGE_CLASS`, + :attr:`~google.cloud.storage.constants.REGIONAL_LEGACY_STORAGE_CLASS`, + :attr:`~google.cloud.storage.constants.DURABLE_REDUCED_AVAILABILITY_STORAGE_CLASS`, + else ``None``. """ temporary_hold = _scalar_property("temporaryHold") diff --git a/storage/google/cloud/storage/bucket.py b/storage/google/cloud/storage/bucket.py index 40cadd9a0831..2ae9aebfdc0d 100644 --- a/storage/google/cloud/storage/bucket.py +++ b/storage/google/cloud/storage/bucket.py @@ -39,10 +39,37 @@ from google.cloud.storage.acl import BucketACL from google.cloud.storage.acl import DefaultObjectACL from google.cloud.storage.blob import Blob +from google.cloud.storage.constants import COLDLINE_STORAGE_CLASS +from google.cloud.storage.constants import DUAL_REGION_LOCATION_TYPE +from google.cloud.storage.constants import ( + DURABLE_REDUCED_AVAILABILITY_LEGACY_STORAGE_CLASS, +) +from google.cloud.storage.constants import MULTI_REGIONAL_LEGACY_STORAGE_CLASS +from google.cloud.storage.constants import MULTI_REGION_LOCATION_TYPE +from google.cloud.storage.constants import NEARLINE_STORAGE_CLASS +from google.cloud.storage.constants import REGIONAL_LEGACY_STORAGE_CLASS +from google.cloud.storage.constants import REGION_LOCATION_TYPE +from google.cloud.storage.constants import STANDARD_STORAGE_CLASS from google.cloud.storage.notification import BucketNotification from google.cloud.storage.notification import NONE_PAYLOAD_FORMAT +_UBLA_BPO_ENABLED_MESSAGE = ( + "Pass only one of 'uniform_bucket_level_access_enabled' / " + "'bucket_policy_only_enabled' to 'IAMConfiguration'." +) +_BPO_ENABLED_MESSAGE = ( + "'IAMConfiguration.bucket_policy_only_enabled' is deprecated. 
" + "Instead, use 'IAMConfiguration.uniform_bucket_level_access_enabled'." +) +_UBLA_BPO_LOCK_TIME_MESSAGE = ( + "Pass only one of 'uniform_bucket_level_access_lock_time' / " + "'bucket_policy_only_lock_time' to 'IAMConfiguration'." +) +_BPO_LOCK_TIME_MESSAGE = ( + "'IAMConfiguration.bucket_policy_only_lock_time' is deprecated. " + "Instead, use 'IAMConfiguration.uniform_bucket_level_access_lock_time'." +) _LOCATION_SETTER_MESSAGE = ( "Assignment to 'Bucket.location' is deprecated, as it is only " "valid before the bucket is created. Instead, pass the location " @@ -130,7 +157,7 @@ class LifecycleRuleConditions(dict): version. :type matches_storage_class: list(str), one or more of - :attr:`Bucket._STORAGE_CLASSES`. + :attr:`Bucket.STORAGE_CLASSES`. :param matches_storage_class: (optional) apply rule action to items which whose storage class matches this value. @@ -244,7 +271,7 @@ def from_api_repr(cls, resource): class LifecycleRuleSetStorageClass(dict): """Map a lifecycle rule upating storage class of matching items. - :type storage_class: str, one of :attr:`Bucket._STORAGE_CLASSES`. + :type storage_class: str, one of :attr:`Bucket.STORAGE_CLASSES`. :param storage_class: new storage class to assign to matching items. :type kw: dict @@ -275,29 +302,66 @@ def from_api_repr(cls, resource): return instance +_default = object() + + class IAMConfiguration(dict): """Map a bucket's IAM configuration. :type bucket: :class:`Bucket` :params bucket: Bucket for which this instance is the policy. + :type uniform_bucket_level_access_enabled: bool + :params uniform_bucket_level_access_enabled: + (optional) whether the IAM-only policy is enabled for the bucket. + + :type uniform_bucket_level_access_locked_time: :class:`datetime.datetime` + :params uniform_bucket_level_access_locked_time: + (optional) When the bucket's IAM-only policy was enabled. + This value should normally only be set by the back-end API. 
+ :type bucket_policy_only_enabled: bool - :params bucket_policy_only_enabled: (optional) whether the IAM-only policy is enabled for the bucket. + :params bucket_policy_only_enabled: + Deprecated alias for :data:`uniform_bucket_level_access_enabled`. :type bucket_policy_only_locked_time: :class:`datetime.datetime` - :params bucket_policy_only_locked_time: (optional) When the bucket's IAM-only policy was ehabled. This value should normally only be set by the back-end API. + :params bucket_policy_only_locked_time: + Deprecated alias for :data:`uniform_bucket_level_access_locked_time`. """ def __init__( self, bucket, - bucket_policy_only_enabled=False, - bucket_policy_only_locked_time=None, + uniform_bucket_level_access_enabled=_default, + uniform_bucket_level_access_locked_time=_default, + bucket_policy_only_enabled=_default, + bucket_policy_only_locked_time=_default, ): - data = {"bucketPolicyOnly": {"enabled": bucket_policy_only_enabled}} - if bucket_policy_only_locked_time is not None: - data["bucketPolicyOnly"]["lockedTime"] = _datetime_to_rfc3339( - bucket_policy_only_locked_time + if bucket_policy_only_enabled is not _default: + + if uniform_bucket_level_access_enabled is not _default: + raise ValueError(_UBLA_BPO_ENABLED_MESSAGE) + + warnings.warn(_BPO_ENABLED_MESSAGE, DeprecationWarning, stacklevel=2) + uniform_bucket_level_access_enabled = bucket_policy_only_enabled + + if bucket_policy_only_locked_time is not _default: + + if uniform_bucket_level_access_locked_time is not _default: + raise ValueError(_UBLA_BPO_LOCK_TIME_MESSAGE) + + warnings.warn(_BPO_LOCK_TIME_MESSAGE, DeprecationWarning, stacklevel=2) + uniform_bucket_level_access_locked_time = bucket_policy_only_locked_time + + if uniform_bucket_level_access_enabled is _default: + uniform_bucket_level_access_enabled = False + + data = { + "uniformBucketLevelAccess": {"enabled": uniform_bucket_level_access_enabled} + } + if uniform_bucket_level_access_locked_time is not _default: + 
data["uniformBucketLevelAccess"]["lockedTime"] = _datetime_to_rfc3339( + uniform_bucket_level_access_locked_time ) super(IAMConfiguration, self).__init__(data) self._bucket = bucket @@ -329,41 +393,66 @@ def bucket(self): return self._bucket @property - def bucket_policy_only_enabled(self): + def uniform_bucket_level_access_enabled(self): """If set, access checks only use bucket-level IAM policies or above. :rtype: bool :returns: whether the bucket is configured to allow only IAM. """ - bpo = self.get("bucketPolicyOnly", {}) - return bpo.get("enabled", False) + ubla = self.get("uniformBucketLevelAccess", {}) + return ubla.get("enabled", False) - @bucket_policy_only_enabled.setter - def bucket_policy_only_enabled(self, value): - bpo = self.setdefault("bucketPolicyOnly", {}) - bpo["enabled"] = bool(value) + @uniform_bucket_level_access_enabled.setter + def uniform_bucket_level_access_enabled(self, value): + ubla = self.setdefault("uniformBucketLevelAccess", {}) + ubla["enabled"] = bool(value) self.bucket._patch_property("iamConfiguration", self) @property - def bucket_policy_only_locked_time(self): - """Deadline for changing :attr:`bucket_policy_only_enabled` from true to false. + def uniform_bucket_level_access_locked_time(self): + """Deadline for changing :attr:`uniform_bucket_level_access_enabled` from true to false. - If the bucket's :attr:`bucket_policy_only_enabled` is true, this property + If the bucket's :attr:`uniform_bucket_level_access_enabled` is true, this property is time time after which that setting becomes immutable. - If the bucket's :attr:`bucket_policy_only_enabled` is false, this property + If the bucket's :attr:`uniform_bucket_level_access_enabled` is false, this property is ``None``. :rtype: Union[:class:`datetime.datetime`, None] - :returns: (readonly) Time after which :attr:`bucket_policy_only_enabled` will + :returns: (readonly) Time after which :attr:`uniform_bucket_level_access_enabled` will be frozen as true. 
""" - bpo = self.get("bucketPolicyOnly", {}) - stamp = bpo.get("lockedTime") + ubla = self.get("uniformBucketLevelAccess", {}) + stamp = ubla.get("lockedTime") if stamp is not None: stamp = _rfc3339_to_datetime(stamp) return stamp + @property + def bucket_policy_only_enabled(self): + """Deprecated alias for :attr:`uniform_bucket_level_access_enabled`. + + :rtype: bool + :returns: whether the bucket is configured to allow only IAM. + """ + return self.uniform_bucket_level_access_enabled + + @bucket_policy_only_enabled.setter + def bucket_policy_only_enabled(self, value): + warnings.warn(_BPO_ENABLED_MESSAGE, DeprecationWarning, stacklevel=2) + self.uniform_bucket_level_access_enabled = value + + @property + def bucket_policy_only_locked_time(self): + """Deprecated alias for :attr:`uniform_bucket_level_access_locked_time`. + + :rtype: Union[:class:`datetime.datetime`, None] + :returns: + (readonly) Time after which :attr:`bucket_policy_only_enabled` will + be frozen as true. + """ + return self.uniform_bucket_level_access_locked_time + class Bucket(_PropertyMixin): """A class representing a Bucket on Cloud Storage. @@ -387,38 +476,7 @@ class Bucket(_PropertyMixin): This is used in Bucket.delete() and Bucket.make_public(). """ - STANDARD_STORAGE_CLASS = "STANDARD" - """Storage class for objects accessed more than once per month.""" - - NEARLINE_STORAGE_CLASS = "NEARLINE" - """Storage class for objects accessed at most once per month.""" - - COLDLINE_STORAGE_CLASS = "COLDLINE" - """Storage class for objects accessed at most once per year.""" - - MULTI_REGIONAL_LEGACY_STORAGE_CLASS = "MULTI_REGIONAL" - """Legacy storage class. - - Alias for :attr:`STANDARD_STORAGE_CLASS`. - - Implies :attr:`MULTI_REGION_LOCATION_TYPE` for :attr:`location_type`. - """ - - REGIONAL_LEGACY_STORAGE_CLASS = "REGIONAL" - """Legacy storage class. - - Alias for :attr:`STANDARD_STORAGE_CLASS`. - - Implies :attr:`REGION_LOCATION_TYPE` for :attr:`location_type`. 
- """ - - DURABLE_REDUCED_AVAILABILITY_LEGACY_STORAGE_CLASS = "DURABLE_REDUCED_AVAILABILITY" - """Legacy storage class. - - Similar to :attr:`NEARLINE_STORAGE_CLASS`. - """ - - _STORAGE_CLASSES = ( + STORAGE_CLASSES = ( STANDARD_STORAGE_CLASS, NEARLINE_STORAGE_CLASS, COLDLINE_STORAGE_CLASS, @@ -435,24 +493,6 @@ class Bucket(_PropertyMixin): https://cloud.google.com/storage/docs/storage-classes """ - MULTI_REGION_LOCATION_TYPE = "multi-region" - """Location type: data will be replicated across regions in a multi-region. - - Provides highest availability across largest area. - """ - - REGION_LOCATION_TYPE = "region" - """Location type: data will be stored within a single region. - - Provides lowest latency within a single region. - """ - - DUAL_REGION_LOCATION_TYPE = "dual-region" - """Location type: data will be stored within two primary regions. - - Provides high availability and low latency across two regions. - """ - _LOCATION_TYPES = ( MULTI_REGION_LOCATION_TYPE, REGION_LOCATION_TYPE, @@ -633,7 +673,14 @@ def exists(self, client=None): except NotFound: return False - def create(self, client=None, project=None, location=None): + def create( + self, + client=None, + project=None, + location=None, + predefined_acl=None, + predefined_default_object_acl=None, + ): """Creates current bucket. If the bucket already exists, will raise @@ -660,6 +707,16 @@ def create(self, client=None, project=None, location=None): :param location: Optional. The location of the bucket. If not passed, the default location, US, will be used. See https://cloud.google.com/storage/docs/bucket-locations + + :type predefined_acl: str + :param predefined_acl: + Optional. Name of predefined ACL to apply to bucket. See: + https://cloud.google.com/storage/docs/access-control/lists#predefined-acl + + :type predefined_default_object_acl: str + :param predefined_default_object_acl: + Optional. Name of predefined ACL to apply to bucket's objects. 
See: + https://cloud.google.com/storage/docs/access-control/lists#predefined-acl """ if self.user_project is not None: raise ValueError("Cannot create bucket with 'user_project' set.") @@ -673,6 +730,17 @@ def create(self, client=None, project=None, location=None): raise ValueError("Client project not set: pass an explicit project.") query_params = {"project": project} + + if predefined_acl is not None: + predefined_acl = BucketACL.validate_predefined(predefined_acl) + query_params["predefinedAcl"] = predefined_acl + + if predefined_default_object_acl is not None: + predefined_default_object_acl = DefaultObjectACL.validate_predefined( + predefined_default_object_acl + ) + query_params["predefinedDefaultObjectAcl"] = predefined_default_object_acl + properties = {key: self._properties[key] for key in self._changes} properties["name"] = self.name @@ -1422,7 +1490,7 @@ def add_lifecycle_set_storage_class_rule(self, storage_class, **kw): :start-after: [START add_lifecycle_set_storage_class_rule] :end-before: [END add_lifecycle_set_storage_class_rule] - :type storage_class: str, one of :attr:`_STORAGE_CLASSES`. + :type storage_class: str, one of :attr:`STORAGE_CLASSES`. :param storage_class: new storage class to assign to matching items. :type kw: dict @@ -1476,8 +1544,10 @@ def location_type(self): :rtype: str or ``NoneType`` :returns: - If set, one of :attr:`MULTI_REGION_LOCATION_TYPE`, - :attr:`REGION_LOCATION_TYPE`, or :attr:`DUAL_REGION_LOCATION_TYPE`, + If set, one of + :attr:`~google.cloud.storage.constants.MULTI_REGION_LOCATION_TYPE`, + :attr:`~google.cloud.storage.constants.REGION_LOCATION_TYPE`, or + :attr:`~google.cloud.storage.constants.DUAL_REGION_LOCATION_TYPE`, else ``None``. 
""" return self._properties.get("locationType") @@ -1639,12 +1709,14 @@ def storage_class(self): :rtype: str or ``NoneType`` :returns: - If set, one of :attr:`NEARLINE_STORAGE_CLASS`, - :attr:`COLDLINE_STORAGE_CLASS`, :attr:`STANDARD_STORAGE_CLASS`, - :attr:`MULTI_REGIONAL_LEGACY_STORAGE_CLASS`, - :attr:`REGIONAL_LEGACY_STORAGE_CLASS`, + If set, one of + :attr:`~google.cloud.storage.constants.NEARLINE_STORAGE_CLASS`, + :attr:`~google.cloud.storage.constants.COLDLINE_STORAGE_CLASS`, + :attr:`~google.cloud.storage.constants.STANDARD_STORAGE_CLASS`, + :attr:`~google.cloud.storage.constants.MULTI_REGIONAL_LEGACY_STORAGE_CLASS`, + :attr:`~google.cloud.storage.constants.REGIONAL_LEGACY_STORAGE_CLASS`, or - :attr:`DURABLE_REDUCED_AVAILABILITY_LEGACY_STORAGE_CLASS`, + :attr:`~google.cloud.storage.constants.DURABLE_REDUCED_AVAILABILITY_LEGACY_STORAGE_CLASS`, else ``None``. """ return self._properties.get("storageClass") @@ -1657,14 +1729,16 @@ def storage_class(self, value): :type value: str :param value: - One of :attr:`NEARLINE_STORAGE_CLASS`, - :attr:`COLDLINE_STORAGE_CLASS`, :attr:`STANDARD_STORAGE_CLASS`, - :attr:`MULTI_REGIONAL_LEGACY_STORAGE_CLASS`, - :attr:`REGIONAL_LEGACY_STORAGE_CLASS`, + One of + :attr:`~google.cloud.storage.constants.NEARLINE_STORAGE_CLASS`, + :attr:`~google.cloud.storage.constants.COLDLINE_STORAGE_CLASS`, + :attr:`~google.cloud.storage.constants.STANDARD_STORAGE_CLASS`, + :attr:`~google.cloud.storage.constants.MULTI_REGIONAL_LEGACY_STORAGE_CLASS`, + :attr:`~google.cloud.storage.constants.REGIONAL_LEGACY_STORAGE_CLASS`, or - :attr:`DURABLE_REDUCED_AVAILABILITY_LEGACY_STORAGE_CLASS`, + :attr:`~google.cloud.storage.constants.DURABLE_REDUCED_AVAILABILITY_LEGACY_STORAGE_CLASS`, """ - if value not in self._STORAGE_CLASSES: + if value not in self.STORAGE_CLASSES: raise ValueError("Invalid storage class: %s" % (value,)) self._patch_property("storageClass", value) @@ -2153,11 +2227,12 @@ def generate_signed_url( to the ``client`` stored on the blob's 
bucket. - :type credentials: :class:`oauth2client.client.OAuth2Credentials` or + :type credentials: :class:`google.auth.credentials.Credentials` or :class:`NoneType` - :param credentials: (Optional) The OAuth2 credentials to use to sign - the URL. Defaults to the credentials stored on the - client used. + :param credentials: The authorization credentials to attach to requests. + These credentials identify this application to the service. + If none are specified, the client will attempt to ascertain + the credentials from the environment. :type version: str :param version: (Optional) The version of signed credential to create. diff --git a/storage/google/cloud/storage/client.py b/storage/google/cloud/storage/client.py index 0bb69252aeee..ac58fb2a092d 100644 --- a/storage/google/cloud/storage/client.py +++ b/storage/google/cloud/storage/client.py @@ -22,6 +22,7 @@ from google.cloud._helpers import _LocalStack from google.cloud.client import ClientWithProject from google.cloud.exceptions import NotFound +from google.cloud.storage._helpers import _get_storage_host from google.cloud.storage._http import Connection from google.cloud.storage.batch import Batch from google.cloud.storage.bucket import Bucket @@ -94,6 +95,9 @@ def __init__( ) kw_args = {"client_info": client_info} + + kw_args["api_endpoint"] = _get_storage_host() + if client_options: if type(client_options) == dict: client_options = google.api_core.client_options.from_dict( @@ -578,7 +582,9 @@ def list_buckets( extra_params=extra_params, ) - def create_hmac_key(self, service_account_email, project_id=None): + def create_hmac_key( + self, service_account_email, project_id=None, user_project=None + ): """Create an HMAC key for a service account. :type service_account_email: str @@ -588,6 +594,9 @@ def create_hmac_key(self, service_account_email, project_id=None): :param project_id: (Optional) explicit project ID for the key. Defaults to the client's project. 
+ :type user_project: str + :param user_project: (Optional) This parameter is currently ignored. + :rtype: Tuple[:class:`~google.cloud.storage.hmac_key.HMACKeyMetadata`, str] :returns: metadata for the created key, plus the bytes of the key's secret, which is an 40-character base64-encoded string. @@ -597,6 +606,10 @@ def create_hmac_key(self, service_account_email, project_id=None): path = "/projects/{}/hmacKeys".format(project_id) qs_params = {"serviceAccountEmail": service_account_email} + + if user_project is not None: + qs_params["userProject"] = user_project + api_response = self._connection.api_request( method="POST", path=path, query_params=qs_params ) @@ -611,6 +624,7 @@ def list_hmac_keys( service_account_email=None, show_deleted_keys=None, project_id=None, + user_project=None, ): """List HMAC keys for a project. @@ -631,6 +645,9 @@ def list_hmac_keys( :param project_id: (Optional) explicit project ID for the key. Defaults to the client's project. + :type user_project: str + :param user_project: (Optional) This parameter is currently ignored. + :rtype: Tuple[:class:`~google.cloud.storage.hmac_key.HMACKeyMetadata`, str] :returns: metadata for the created key, plus the bytes of the key's secret, which is an 40-character base64-encoded string. @@ -647,6 +664,9 @@ def list_hmac_keys( if show_deleted_keys is not None: extra_params["showDeletedKeys"] = show_deleted_keys + if user_project is not None: + extra_params["userProject"] = user_project + return page_iterator.HTTPIterator( client=self, api_request=self._connection.api_request, @@ -656,7 +676,7 @@ def list_hmac_keys( extra_params=extra_params, ) - def get_hmac_key_metadata(self, access_id, project_id=None): + def get_hmac_key_metadata(self, access_id, project_id=None, user_project=None): """Return a metadata instance for the given HMAC key. 
:type access_id: str @@ -665,8 +685,11 @@ def get_hmac_key_metadata(self, access_id, project_id=None): :type project_id: str :param project_id: (Optional) project ID of an existing key. Defaults to client's project. + + :type user_project: str + :param user_project: (Optional) This parameter is currently ignored. """ - metadata = HMACKeyMetadata(self, access_id, project_id) + metadata = HMACKeyMetadata(self, access_id, project_id, user_project) metadata.reload() # raises NotFound for missing key return metadata diff --git a/storage/google/cloud/storage/constants.py b/storage/google/cloud/storage/constants.py new file mode 100644 index 000000000000..e93d3ab29546 --- /dev/null +++ b/storage/google/cloud/storage/constants.py @@ -0,0 +1,72 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Constants used acros google.cloud.storage modules.""" + +# Storage classes + +STANDARD_STORAGE_CLASS = "STANDARD" +"""Storage class for objects accessed more than once per month.""" + +NEARLINE_STORAGE_CLASS = "NEARLINE" +"""Storage class for objects accessed at most once per month.""" + +COLDLINE_STORAGE_CLASS = "COLDLINE" +"""Storage class for objects accessed at most once per year.""" + +MULTI_REGIONAL_LEGACY_STORAGE_CLASS = "MULTI_REGIONAL" +"""Legacy storage class. + +Alias for :attr:`STANDARD_STORAGE_CLASS`. 
+ +Can only be used for objects in buckets whose +:attr:`~google.cloud.storage.bucket.Bucket.location_type` is +:attr:`~google.cloud.storage.bucket.Bucket.MULTI_REGION_LOCATION_TYPE`. +""" + +REGIONAL_LEGACY_STORAGE_CLASS = "REGIONAL" +"""Legacy storage class. + +Alias for :attr:`STANDARD_STORAGE_CLASS`. + +Can only be used for objects in buckets whose +:attr:`~google.cloud.storage.bucket.Bucket.location_type` is +:attr:`~google.cloud.storage.bucket.Bucket.REGION_LOCATION_TYPE`. +""" + +DURABLE_REDUCED_AVAILABILITY_LEGACY_STORAGE_CLASS = "DURABLE_REDUCED_AVAILABILITY" +"""Legacy storage class. + +Similar to :attr:`NEARLINE_STORAGE_CLASS`. +""" + + +# Location types + +MULTI_REGION_LOCATION_TYPE = "multi-region" +"""Location type: data will be replicated across regions in a multi-region. + +Provides highest availability across largest area. +""" + +REGION_LOCATION_TYPE = "region" +"""Location type: data will be stored within a single region. + +Provides lowest latency within a single region. +""" + +DUAL_REGION_LOCATION_TYPE = "dual-region" +"""Location type: data will be stored within two primary regions. + +Provides high availability and low latency across two regions. +""" diff --git a/storage/google/cloud/storage/hmac_key.py b/storage/google/cloud/storage/hmac_key.py index 09075896fcb2..96ccbcaed910 100644 --- a/storage/google/cloud/storage/hmac_key.py +++ b/storage/google/cloud/storage/hmac_key.py @@ -28,6 +28,9 @@ class HMACKeyMetadata(object): :type project_id: str :param project_id: (Optional) project ID of an existing key. Defaults to client's project. + + :type user_project: str + :param user_project: (Optional) This parameter is currently ignored. 
""" ACTIVE_STATE = "ACTIVE" @@ -42,7 +45,7 @@ class HMACKeyMetadata(object): _SETTABLE_STATES = (ACTIVE_STATE, INACTIVE_STATE) - def __init__(self, client, access_id=None, project_id=None): + def __init__(self, client, access_id=None, project_id=None, user_project=None): self._client = client self._properties = {} @@ -52,6 +55,8 @@ def __init__(self, client, access_id=None, project_id=None): if project_id is not None: self._properties["projectId"] = project_id + self._user_project = user_project + def __eq__(self, other): if not isinstance(other, self.__class__): return NotImplemented @@ -170,6 +175,16 @@ def path(self): return "/projects/{}/hmacKeys/{}".format(project, self.access_id) + @property + def user_project(self): + """Project ID to be billed for API requests made via this bucket. + + This property is currently ignored by the server. + + :rtype: str + """ + return self._user_project + def exists(self): """Determine whether or not the key for this metadata exists. @@ -177,7 +192,14 @@ def exists(self): :returns: True if the key exists in Cloud Storage. """ try: - self._client._connection.api_request(method="GET", path=self.path) + qs_params = {} + + if self.user_project is not None: + qs_params["userProject"] = self.user_project + + self._client._connection.api_request( + method="GET", path=self.path, query_params=qs_params + ) except NotFound: return False else: @@ -189,8 +211,13 @@ def reload(self): :raises :class:`~google.api_core.exceptions.NotFound`: if the key does not exist on the back-end. """ + qs_params = {} + + if self.user_project is not None: + qs_params["userProject"] = self.user_project + self._properties = self._client._connection.api_request( - method="GET", path=self.path + method="GET", path=self.path, query_params=qs_params ) def update(self): @@ -199,9 +226,13 @@ def update(self): :raises :class:`~google.api_core.exceptions.NotFound`: if the key does not exist on the back-end. 
""" + qs_params = {} + if self.user_project is not None: + qs_params["userProject"] = self.user_project + payload = {"state": self.state} self._properties = self._client._connection.api_request( - method="PUT", path=self.path, data=payload + method="PUT", path=self.path, data=payload, query_params=qs_params ) def delete(self): @@ -213,4 +244,10 @@ def delete(self): if self.state != self.INACTIVE_STATE: raise ValueError("Cannot delete key if not in 'INACTIVE' state.") - self._client._connection.api_request(method="DELETE", path=self.path) + qs_params = {} + if self.user_project is not None: + qs_params["userProject"] = self.user_project + + self._client._connection.api_request( + method="DELETE", path=self.path, query_params=qs_params + ) diff --git a/storage/setup.py b/storage/setup.py index 017caf1cfa1b..146b4594ad9f 100644 --- a/storage/setup.py +++ b/storage/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-storage" description = "Google Cloud Storage API client library" -version = "1.19.0" +version = "1.22.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' @@ -31,7 +31,7 @@ dependencies = [ "google-auth >= 1.2.0", "google-cloud-core >= 1.0.3, < 2.0dev", - "google-resumable-media >= 0.3.1", + "google-resumable-media >= 0.5.0, < 0.6dev", ] extras = {} diff --git a/storage/tests/perf/README.md b/storage/tests/perf/README.md new file mode 100644 index 000000000000..e77589f610d3 --- /dev/null +++ b/storage/tests/perf/README.md @@ -0,0 +1,21 @@ +# storage benchwrapp + +main.py is a gRPC wrapper around the storage library for benchmarking purposes. + +## Running + +```bash +$ export STORAGE_EMULATOR_HOST=http://localhost:8080 +$ pip install grpcio +$ cd storage +$ pip install -e . # install google.cloud.storage locally +$ cd tests/perf +$ python3 benchwrapper.py --port 8081 +``` + +## Re-generating protos + +```bash +$ pip install grpcio-tools +$ python -m grpc_tools.protoc -I. --python_out=. --grpc_python_out=. 
*.proto +``` diff --git a/storage/tests/perf/benchwrapper.py b/storage/tests/perf/benchwrapper.py new file mode 100644 index 000000000000..9ebb3f455839 --- /dev/null +++ b/storage/tests/perf/benchwrapper.py @@ -0,0 +1,54 @@ +import argparse +import sys +import time +import grpc +import os +from concurrent import futures +import storage_pb2_grpc +import storage_pb2 +from google.cloud import storage + +_ONE_DAY_IN_SECONDS = 60 * 60 * 24 + +parser = argparse.ArgumentParser() + +if os.environ.get("STORAGE_EMULATOR_HOST") is None: + sys.exit( + "This benchmarking server only works when connected to an emulator. Please set STORAGE_EMULATOR_HOST." + ) + +parser.add_argument("--port", help="The port to run on.") + +args = parser.parse_args() + +if args.port is None: + sys.exit("Usage: python3 main.py --port 8081") + +client = storage.Client.create_anonymous_client() + + +class StorageBenchWrapperServicer(storage_pb2_grpc.StorageBenchWrapperServicer): + def Write(self, request, context): + # TODO(deklerk): implement this + return storage_pb2.EmptyResponse() + + def Read(self, request, context): + bucket = client.bucket(request.bucketName) + blob = storage.Blob(request.objectName, bucket) + blob.download_as_string() + return storage_pb2.EmptyResponse() + + +server = grpc.server(futures.ThreadPoolExecutor(max_workers=10)) +storage_pb2_grpc.add_StorageBenchWrapperServicer_to_server( + StorageBenchWrapperServicer(), server +) + +print("listening on localhost:" + args.port) +server.add_insecure_port("[::]:" + args.port) +server.start() +try: + while True: + time.sleep(_ONE_DAY_IN_SECONDS) +except KeyboardInterrupt: + server.stop(0) diff --git a/storage/tests/perf/storage.proto b/storage/tests/perf/storage.proto new file mode 100644 index 000000000000..055e7e7867c0 --- /dev/null +++ b/storage/tests/perf/storage.proto @@ -0,0 +1,43 @@ +// Copyright 2019 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in 
compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package storage_bench; + +message ObjectRead{ + // The bucket string identifier. + string bucketName = 1; + // The object/blob string identifier. + string objectName = 2; +} + +message ObjectWrite{ + // The bucket string identifier. + string bucketName = 1; + // The object/blob string identifiers. + string objectName = 2; + // The string containing the upload file path. + string destination = 3; +} + +message EmptyResponse{ +} + +service StorageBenchWrapper{ + // Performs an upload from a specific object. + rpc Write(ObjectWrite) returns (EmptyResponse) {} + // Read a specific object. + rpc Read(ObjectRead) returns (EmptyResponse){} +} \ No newline at end of file diff --git a/storage/tests/perf/storage_pb2.py b/storage/tests/perf/storage_pb2.py new file mode 100644 index 000000000000..59ea52f919d0 --- /dev/null +++ b/storage/tests/perf/storage_pb2.py @@ -0,0 +1,252 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: storage.proto + +import sys + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +DESCRIPTOR = _descriptor.FileDescriptor( + name="storage.proto", + package="storage_bench", + syntax="proto3", + serialized_options=None, + serialized_pb=_b( + '\n\rstorage.proto\x12\rstorage_bench"4\n\nObjectRead\x12\x12\n\nbucketName\x18\x01 \x01(\t\x12\x12\n\nobjectName\x18\x02 \x01(\t"J\n\x0bObjectWrite\x12\x12\n\nbucketName\x18\x01 \x01(\t\x12\x12\n\nobjectName\x18\x02 \x01(\t\x12\x13\n\x0b\x64\x65stination\x18\x03 \x01(\t"\x0f\n\rEmptyResponse2\x9d\x01\n\x13StorageBenchWrapper\x12\x43\n\x05Write\x12\x1a.storage_bench.ObjectWrite\x1a\x1c.storage_bench.EmptyResponse"\x00\x12\x41\n\x04Read\x12\x19.storage_bench.ObjectRead\x1a\x1c.storage_bench.EmptyResponse"\x00\x62\x06proto3' + ), +) + + +_OBJECTREAD = _descriptor.Descriptor( + name="ObjectRead", + full_name="storage_bench.ObjectRead", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="bucketName", + full_name="storage_bench.ObjectRead.bucketName", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="objectName", + full_name="storage_bench.ObjectRead.objectName", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + 
is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=32, + serialized_end=84, +) + + +_OBJECTWRITE = _descriptor.Descriptor( + name="ObjectWrite", + full_name="storage_bench.ObjectWrite", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="bucketName", + full_name="storage_bench.ObjectWrite.bucketName", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="objectName", + full_name="storage_bench.ObjectWrite.objectName", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="destination", + full_name="storage_bench.ObjectWrite.destination", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=86, + serialized_end=160, +) + + +_EMPTYRESPONSE = _descriptor.Descriptor( + name="EmptyResponse", + full_name="storage_bench.EmptyResponse", + filename=None, + 
file=DESCRIPTOR, + containing_type=None, + fields=[], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=162, + serialized_end=177, +) + +DESCRIPTOR.message_types_by_name["ObjectRead"] = _OBJECTREAD +DESCRIPTOR.message_types_by_name["ObjectWrite"] = _OBJECTWRITE +DESCRIPTOR.message_types_by_name["EmptyResponse"] = _EMPTYRESPONSE +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +ObjectRead = _reflection.GeneratedProtocolMessageType( + "ObjectRead", + (_message.Message,), + { + "DESCRIPTOR": _OBJECTREAD, + "__module__": "storage_pb2" + # @@protoc_insertion_point(class_scope:storage_bench.ObjectRead) + }, +) +_sym_db.RegisterMessage(ObjectRead) + +ObjectWrite = _reflection.GeneratedProtocolMessageType( + "ObjectWrite", + (_message.Message,), + { + "DESCRIPTOR": _OBJECTWRITE, + "__module__": "storage_pb2" + # @@protoc_insertion_point(class_scope:storage_bench.ObjectWrite) + }, +) +_sym_db.RegisterMessage(ObjectWrite) + +EmptyResponse = _reflection.GeneratedProtocolMessageType( + "EmptyResponse", + (_message.Message,), + { + "DESCRIPTOR": _EMPTYRESPONSE, + "__module__": "storage_pb2" + # @@protoc_insertion_point(class_scope:storage_bench.EmptyResponse) + }, +) +_sym_db.RegisterMessage(EmptyResponse) + + +_STORAGEBENCHWRAPPER = _descriptor.ServiceDescriptor( + name="StorageBenchWrapper", + full_name="storage_bench.StorageBenchWrapper", + file=DESCRIPTOR, + index=0, + serialized_options=None, + serialized_start=180, + serialized_end=337, + methods=[ + _descriptor.MethodDescriptor( + name="Write", + full_name="storage_bench.StorageBenchWrapper.Write", + index=0, + containing_service=None, + input_type=_OBJECTWRITE, + output_type=_EMPTYRESPONSE, + serialized_options=None, + ), + _descriptor.MethodDescriptor( + name="Read", + full_name="storage_bench.StorageBenchWrapper.Read", + index=1, + containing_service=None, + input_type=_OBJECTREAD, + 
output_type=_EMPTYRESPONSE, + serialized_options=None, + ), + ], +) +_sym_db.RegisterServiceDescriptor(_STORAGEBENCHWRAPPER) + +DESCRIPTOR.services_by_name["StorageBenchWrapper"] = _STORAGEBENCHWRAPPER + +# @@protoc_insertion_point(module_scope) diff --git a/storage/tests/perf/storage_pb2_grpc.py b/storage/tests/perf/storage_pb2_grpc.py new file mode 100644 index 000000000000..1b3a2c82f50b --- /dev/null +++ b/storage/tests/perf/storage_pb2_grpc.py @@ -0,0 +1,64 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +import grpc + +import storage_pb2 as storage__pb2 + + +class StorageBenchWrapperStub(object): + # missing associated documentation comment in .proto file + pass + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.Write = channel.unary_unary( + "/storage_bench.StorageBenchWrapper/Write", + request_serializer=storage__pb2.ObjectWrite.SerializeToString, + response_deserializer=storage__pb2.EmptyResponse.FromString, + ) + self.Read = channel.unary_unary( + "/storage_bench.StorageBenchWrapper/Read", + request_serializer=storage__pb2.ObjectRead.SerializeToString, + response_deserializer=storage__pb2.EmptyResponse.FromString, + ) + + +class StorageBenchWrapperServicer(object): + # missing associated documentation comment in .proto file + pass + + def Write(self, request, context): + """Performs an upload from a specific object. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def Read(self, request, context): + """Read a specific object. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + +def add_StorageBenchWrapperServicer_to_server(servicer, server): + rpc_method_handlers = { + "Write": grpc.unary_unary_rpc_method_handler( + servicer.Write, + request_deserializer=storage__pb2.ObjectWrite.FromString, + response_serializer=storage__pb2.EmptyResponse.SerializeToString, + ), + "Read": grpc.unary_unary_rpc_method_handler( + servicer.Read, + request_deserializer=storage__pb2.ObjectRead.FromString, + response_serializer=storage__pb2.EmptyResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + "storage_bench.StorageBenchWrapper", rpc_method_handlers + ) + server.add_generic_rpc_handlers((generic_handler,)) diff --git a/storage/tests/system.py b/storage/tests/system.py index c2717fb2b1af..2bfaa5b8f492 100644 --- a/storage/tests/system.py +++ b/storage/tests/system.py @@ -12,7 +12,11 @@ # See the License for the specific language governing permissions and # limitations under the License. +import base64 import datetime +import gzip +import hashlib +import io import os import re import tempfile @@ -32,10 +36,10 @@ from test_utils.retry import RetryErrors from test_utils.system import unique_resource_id +from test_utils.vpcsc_config import vpcsc_config USER_PROJECT = os.environ.get("GOOGLE_CLOUD_TESTS_USER_PROJECT") -RUNNING_IN_VPCSC = os.getenv("GOOGLE_CLOUD_TESTS_IN_VPCSC", "").lower() == "true" def _bad_copy(bad_request): @@ -79,7 +83,7 @@ def setUpModule(): # fails with a ConnectionError. 
Config.TEST_BUCKET = Config.CLIENT.bucket(bucket_name) Config.TEST_BUCKET.versioning_enabled = True - retry_429(Config.TEST_BUCKET.create)() + retry_429_503(Config.TEST_BUCKET.create)() def tearDownModule(): @@ -166,7 +170,7 @@ def test_create_bucket(self): self.assertRaises( exceptions.NotFound, Config.CLIENT.get_bucket, new_bucket_name ) - created = retry_429(Config.CLIENT.create_bucket)(new_bucket_name) + created = retry_429_503(Config.CLIENT.create_bucket)(new_bucket_name) self.case_buckets_to_delete.append(new_bucket_name) self.assertEqual(created.name, new_bucket_name) @@ -188,7 +192,7 @@ def test_lifecycle_rules(self): ), ] - retry_429(bucket.create)(location="us") + retry_429_503(bucket.create)(location="us") self.case_buckets_to_delete.append(new_bucket_name) self.assertEqual(bucket.name, new_bucket_name) @@ -208,7 +212,7 @@ def test_list_buckets(self): created_buckets = [] for bucket_name in buckets_to_create: bucket = Config.CLIENT.bucket(bucket_name) - retry_429(bucket.create)() + retry_429_503(bucket.create)() self.case_buckets_to_delete.append(bucket_name) # Retrieve the buckets. 
@@ -220,7 +224,7 @@ def test_list_buckets(self): def test_bucket_update_labels(self): bucket_name = "update-labels" + unique_resource_id("-") - bucket = retry_429(Config.CLIENT.create_bucket)(bucket_name) + bucket = retry_429_503(Config.CLIENT.create_bucket)(bucket_name) self.case_buckets_to_delete.append(bucket_name) self.assertTrue(bucket.exists()) @@ -241,7 +245,7 @@ def test_bucket_update_labels(self): @unittest.skipUnless(USER_PROJECT, "USER_PROJECT not set in environment.") def test_crud_bucket_with_requester_pays(self): new_bucket_name = "w-requester-pays" + unique_resource_id("-") - created = retry_429(Config.CLIENT.create_bucket)( + created = retry_429_503(Config.CLIENT.create_bucket)( new_bucket_name, requester_pays=True ) self.case_buckets_to_delete.append(new_bucket_name) @@ -284,7 +288,7 @@ def test_crud_bucket_with_requester_pays(self): @unittest.skipUnless(USER_PROJECT, "USER_PROJECT not set in environment.") def test_bucket_acls_iam_with_user_project(self): new_bucket_name = "acl-w-user-project" + unique_resource_id("-") - retry_429(Config.CLIENT.create_bucket)(new_bucket_name, requester_pays=True) + retry_429_503(Config.CLIENT.create_bucket)(new_bucket_name, requester_pays=True) self.case_buckets_to_delete.append(new_bucket_name) with_user_project = Config.CLIENT.bucket( @@ -322,7 +326,7 @@ def test_bucket_acls_iam_with_user_project(self): @unittest.skipUnless(USER_PROJECT, "USER_PROJECT not set in environment.") def test_copy_existing_file_with_user_project(self): new_bucket_name = "copy-w-requester-pays" + unique_resource_id("-") - created = retry_429(Config.CLIENT.create_bucket)( + created = retry_429_503(Config.CLIENT.create_bucket)( new_bucket_name, requester_pays=True ) self.case_buckets_to_delete.append(new_bucket_name) @@ -354,7 +358,7 @@ def test_copy_existing_file_with_user_project(self): def test_bucket_get_blob_with_user_project(self): new_bucket_name = "w-requester-pays" + unique_resource_id("-") data = b"DEADBEEF" - created = 
retry_429(Config.CLIENT.create_bucket)( + created = retry_429_503(Config.CLIENT.create_bucket)( new_bucket_name, requester_pays=True ) self.case_buckets_to_delete.append(new_bucket_name) @@ -618,9 +622,26 @@ def test_download_blob_w_uri(self): self.assertEqual(file_contents, stored_contents) + def test_upload_gzip_encoded_download_raw(self): + payload = b"DEADBEEF" * 1000 + raw_stream = io.BytesIO() + with gzip.GzipFile(fileobj=raw_stream, mode="wb") as gzip_stream: + gzip_stream.write(payload) + zipped = raw_stream.getvalue() + + blob = self.bucket.blob("test_gzipped.gz") + blob.content_encoding = "gzip" + blob.upload_from_file(raw_stream, rewind=True) + + expanded = blob.download_as_string() + self.assertEqual(expanded, payload) + + raw = blob.download_as_string(raw_download=True) + self.assertEqual(raw, zipped) + class TestUnicode(unittest.TestCase): - @unittest.skipIf(RUNNING_IN_VPCSC, "Test is not VPCSC compatible.") + @vpcsc_config.skip_if_inside_vpcsc def test_fetch_object_and_check_content(self): client = storage.Client() bucket = client.bucket("storage-library-test-bucket") @@ -803,7 +824,7 @@ def setUpClass(cls): cls.skipTest("Signing tests requires a service account credential") bucket_name = "gcp-signing" + unique_resource_id() - cls.bucket = retry_429(Config.CLIENT.create_bucket)(bucket_name) + cls.bucket = retry_429_503(Config.CLIENT.create_bucket)(bucket_name) cls.blob = cls.bucket.blob("README.txt") cls.blob.upload_from_string(cls.BLOB_CONTENT) @@ -860,11 +881,12 @@ def _create_signed_read_url_helper( version="v2", payload=None, expiration=None, + encryption_key=None, ): expiration = self._morph_expiration(version, expiration) if payload is not None: - blob = self.bucket.blob(blob_name) + blob = self.bucket.blob(blob_name, encryption_key=encryption_key) blob.upload_from_string(payload) else: blob = self.blob @@ -873,7 +895,17 @@ def _create_signed_read_url_helper( expiration=expiration, method=method, client=Config.CLIENT, version=version ) - 
response = requests.get(signed_url) + headers = {} + + if encryption_key is not None: + headers["x-goog-encryption-algorithm"] = "AES256" + encoded_key = base64.b64encode(encryption_key).decode("utf-8") + headers["x-goog-encryption-key"] = encoded_key + key_hash = hashlib.sha256(encryption_key).digest() + key_hash = base64.b64encode(key_hash).decode("utf-8") + headers["x-goog-encryption-key-sha256"] = key_hash + + response = requests.get(signed_url, headers=headers) self.assertEqual(response.status_code, 200) if payload is not None: self.assertEqual(response.content, payload) @@ -916,6 +948,23 @@ def test_create_signed_read_url_v4_w_non_ascii_name(self): version="v4", ) + def test_create_signed_read_url_v2_w_csek(self): + encryption_key = os.urandom(32) + self._create_signed_read_url_helper( + blob_name="v2-w-csek.txt", + payload=b"Test signed URL for blob w/ CSEK", + encryption_key=encryption_key, + ) + + def test_create_signed_read_url_v4_w_csek(self): + encryption_key = os.urandom(32) + self._create_signed_read_url_helper( + blob_name="v2-w-csek.txt", + payload=b"Test signed URL for blob w/ CSEK", + encryption_key=encryption_key, + version="v4", + ) + def _create_signed_delete_url_helper(self, version="v2", expiration=None): expiration = self._morph_expiration(version, expiration) @@ -1054,7 +1103,7 @@ def test_compose_replace_existing_blob(self): @unittest.skipUnless(USER_PROJECT, "USER_PROJECT not set in environment.") def test_compose_with_user_project(self): new_bucket_name = "compose-user-project" + unique_resource_id("-") - created = retry_429(Config.CLIENT.create_bucket)( + created = retry_429_503(Config.CLIENT.create_bucket)( new_bucket_name, requester_pays=True ) try: @@ -1129,7 +1178,7 @@ def test_rewrite_rotate_encryption_key(self): def test_rewrite_add_key_with_user_project(self): file_data = self.FILES["simple"] new_bucket_name = "rewrite-key-up" + unique_resource_id("-") - created = retry_429(Config.CLIENT.create_bucket)( + created = 
retry_429_503(Config.CLIENT.create_bucket)( new_bucket_name, requester_pays=True ) try: @@ -1158,7 +1207,7 @@ def test_rewrite_rotate_with_user_project(self): BLOB_NAME = "rotating-keys" file_data = self.FILES["simple"] new_bucket_name = "rewrite-rotate-up" + unique_resource_id("-") - created = retry_429(Config.CLIENT.create_bucket)( + created = retry_429_503(Config.CLIENT.create_bucket)( new_bucket_name, requester_pays=True ) try: @@ -1270,7 +1319,7 @@ def payload_format(): def test_notification_minimal(self): new_bucket_name = "notification-minimal" + unique_resource_id("-") - bucket = retry_429(Config.CLIENT.create_bucket)(new_bucket_name) + bucket = retry_429_503(Config.CLIENT.create_bucket)(new_bucket_name) self.case_buckets_to_delete.append(new_bucket_name) self.assertEqual(list(bucket.list_notifications()), []) notification = bucket.notification(self.TOPIC_NAME) @@ -1286,7 +1335,7 @@ def test_notification_minimal(self): def test_notification_explicit(self): new_bucket_name = "notification-explicit" + unique_resource_id("-") - bucket = retry_429(Config.CLIENT.create_bucket)(new_bucket_name) + bucket = retry_429_503(Config.CLIENT.create_bucket)(new_bucket_name) self.case_buckets_to_delete.append(new_bucket_name) notification = bucket.notification( self.TOPIC_NAME, @@ -1309,7 +1358,7 @@ def test_notification_explicit(self): @unittest.skipUnless(USER_PROJECT, "USER_PROJECT not set in environment.") def test_notification_w_user_project(self): new_bucket_name = "notification-minimal" + unique_resource_id("-") - retry_429(Config.CLIENT.create_bucket)(new_bucket_name, requester_pays=True) + retry_429_503(Config.CLIENT.create_bucket)(new_bucket_name, requester_pays=True) self.case_buckets_to_delete.append(new_bucket_name) with_user_project = Config.CLIENT.bucket( new_bucket_name, user_project=USER_PROJECT @@ -1331,7 +1380,7 @@ class TestAnonymousClient(unittest.TestCase): PUBLIC_BUCKET = "gcp-public-data-landsat" - @unittest.skipIf(RUNNING_IN_VPCSC, "Test is not 
VPCSC compatible.") + @vpcsc_config.skip_if_inside_vpcsc def test_access_to_public_bucket(self): anonymous = storage.Client.create_anonymous_client() bucket = anonymous.bucket(self.PUBLIC_BUCKET) @@ -1531,7 +1580,7 @@ def test_bucket_w_retention_period(self): period_secs = 10 new_bucket_name = "w-retention-period" + unique_resource_id("-") - bucket = retry_429(Config.CLIENT.create_bucket)(new_bucket_name) + bucket = retry_429_503(Config.CLIENT.create_bucket)(new_bucket_name) self.case_buckets_to_delete.append(new_bucket_name) bucket.retention_period = period_secs @@ -1580,7 +1629,7 @@ def test_bucket_w_default_event_based_hold(self): self.assertRaises( exceptions.NotFound, Config.CLIENT.get_bucket, new_bucket_name ) - bucket = retry_429(Config.CLIENT.create_bucket)(new_bucket_name) + bucket = retry_429_503(Config.CLIENT.create_bucket)(new_bucket_name) self.case_buckets_to_delete.append(new_bucket_name) bucket.default_event_based_hold = True @@ -1632,7 +1681,7 @@ def test_blob_w_temporary_hold(self): self.assertRaises( exceptions.NotFound, Config.CLIENT.get_bucket, new_bucket_name ) - bucket = retry_429(Config.CLIENT.create_bucket)(new_bucket_name) + bucket = retry_429_503(Config.CLIENT.create_bucket)(new_bucket_name) self.case_buckets_to_delete.append(new_bucket_name) blob_name = "test-blob" @@ -1666,7 +1715,7 @@ def test_bucket_lock_retention_policy(self): self.assertRaises( exceptions.NotFound, Config.CLIENT.get_bucket, new_bucket_name ) - bucket = retry_429(Config.CLIENT.create_bucket)(new_bucket_name) + bucket = retry_429_503(Config.CLIENT.create_bucket)(new_bucket_name) self.case_buckets_to_delete.append(new_bucket_name) bucket.retention_period = period_secs @@ -1696,14 +1745,14 @@ def tearDown(self): bucket = Config.CLIENT.bucket(bucket_name) retry_429_harder(bucket.delete)(force=True) - def test_new_bucket_w_bpo(self): - new_bucket_name = "new-w-bpo" + unique_resource_id("-") + def test_new_bucket_w_ubla(self): + new_bucket_name = "new-w-ubla" + 
unique_resource_id("-") self.assertRaises( exceptions.NotFound, Config.CLIENT.get_bucket, new_bucket_name ) bucket = Config.CLIENT.bucket(new_bucket_name) - bucket.iam_configuration.bucket_policy_only_enabled = True - retry_429(bucket.create)() + bucket.iam_configuration.uniform_bucket_level_access_enabled = True + retry_429_503(bucket.create)() self.case_buckets_to_delete.append(new_bucket_name) bucket_acl = bucket.acl @@ -1732,13 +1781,12 @@ def test_new_bucket_w_bpo(self): with self.assertRaises(exceptions.BadRequest): blob_acl.save() - @unittest.skipUnless(False, "Back-end fix for BPO/UBLA expected 2019-07-12") - def test_bpo_set_unset_preserves_acls(self): - new_bucket_name = "bpo-acls" + unique_resource_id("-") + def test_ubla_set_unset_preserves_acls(self): + new_bucket_name = "ubla-acls" + unique_resource_id("-") self.assertRaises( exceptions.NotFound, Config.CLIENT.get_bucket, new_bucket_name ) - bucket = retry_429(Config.CLIENT.create_bucket)(new_bucket_name) + bucket = retry_429_503(Config.CLIENT.create_bucket)(new_bucket_name) self.case_buckets_to_delete.append(new_bucket_name) blob_name = "my-blob.txt" @@ -1746,25 +1794,25 @@ def test_bpo_set_unset_preserves_acls(self): payload = b"DEADBEEF" blob.upload_from_string(payload) - # Preserve ACLs before setting BPO + # Preserve ACLs before setting UBLA bucket_acl_before = list(bucket.acl) blob_acl_before = list(bucket.acl) - # Set BPO - bucket.iam_configuration.bucket_policy_only_enabled = True + # Set UBLA + bucket.iam_configuration.uniform_bucket_level_access_enabled = True bucket.patch() - self.assertTrue(bucket.iam_configuration.bucket_policy_only_enabled) + self.assertTrue(bucket.iam_configuration.uniform_bucket_level_access_enabled) - # While BPO is set, cannot get / set ACLs + # While UBLA is set, cannot get / set ACLs with self.assertRaises(exceptions.BadRequest): bucket.acl.reload() - # Clear BPO - bucket.iam_configuration.bucket_policy_only_enabled = False + # Clear UBLA + 
bucket.iam_configuration.uniform_bucket_level_access_enabled = False bucket.patch() - # Query ACLs after clearing BPO + # Query ACLs after clearing UBLA bucket.acl.reload() bucket_acl_after = list(bucket.acl) blob.acl.reload() diff --git a/storage/tests/unit/test__helpers.py b/storage/tests/unit/test__helpers.py index fb8d78cbb637..9b75b0e67fbe 100644 --- a/storage/tests/unit/test__helpers.py +++ b/storage/tests/unit/test__helpers.py @@ -14,6 +14,34 @@ import unittest +import mock + + +class Test__get_storage_host(unittest.TestCase): + @staticmethod + def _call_fut(): + from google.cloud.storage._helpers import _get_storage_host + + return _get_storage_host() + + def test_wo_env_var(self): + from google.cloud.storage._helpers import _DEFAULT_STORAGE_HOST + + with mock.patch("os.environ", {}): + host = self._call_fut() + + self.assertEqual(host, _DEFAULT_STORAGE_HOST) + + def test_w_env_var(self): + from google.cloud.storage._helpers import STORAGE_EMULATOR_ENV_VAR + + HOST = "https://api.example.com" + + with mock.patch("os.environ", {STORAGE_EMULATOR_ENV_VAR: HOST}): + host = self._call_fut() + + self.assertEqual(host, HOST) + class Test_PropertyMixin(unittest.TestCase): @staticmethod diff --git a/storage/tests/unit/test_blob.py b/storage/tests/unit/test_blob.py index 1b38cc67b71f..0c66d58b3a37 100644 --- a/storage/tests/unit/test_blob.py +++ b/storage/tests/unit/test_blob.py @@ -21,7 +21,6 @@ import tempfile import unittest -import google.cloud.storage.blob import mock import pytest import six @@ -391,10 +390,12 @@ def _generate_signed_url_helper( query_parameters=None, credentials=None, expiration=None, + encryption_key=None, ): from six.moves.urllib import parse from google.cloud._helpers import UTC from google.cloud.storage.blob import _API_ACCESS_ENDPOINT + from google.cloud.storage.blob import _get_encryption_headers api_access_endpoint = api_access_endpoint or _API_ACCESS_ENDPOINT @@ -406,7 +407,7 @@ def _generate_signed_url_helper( connection = 
_Connection() client = _Client(connection) bucket = _Bucket(client) - blob = self._make_one(blob_name, bucket=bucket) + blob = self._make_one(blob_name, bucket=bucket, encryption_key=encryption_key) if version is None: effective_version = "v2" @@ -442,6 +443,15 @@ def _generate_signed_url_helper( encoded_name = blob_name.encode("utf-8") expected_resource = "/name/{}".format(parse.quote(encoded_name, safe=b"/~")) + if encryption_key is not None: + expected_headers = headers or {} + if effective_version == "v2": + expected_headers["X-Goog-Encryption-Algorithm"] = "AES256" + else: + expected_headers.update(_get_encryption_headers(encryption_key)) + else: + expected_headers = headers + expected_kwargs = { "resource": expected_resource, "expiration": expiration, @@ -452,7 +462,7 @@ def _generate_signed_url_helper( "response_type": response_type, "response_disposition": response_disposition, "generation": generation, - "headers": headers, + "headers": expected_headers, "query_parameters": query_parameters, } signer.assert_called_once_with(expected_creds, **expected_kwargs) @@ -514,6 +524,14 @@ def test_generate_signed_url_v2_w_generation(self): def test_generate_signed_url_v2_w_headers(self): self._generate_signed_url_v2_helper(headers={"x-goog-foo": "bar"}) + def test_generate_signed_url_v2_w_csek(self): + self._generate_signed_url_v2_helper(encryption_key=os.urandom(32)) + + def test_generate_signed_url_v2_w_csek_and_headers(self): + self._generate_signed_url_v2_helper( + encryption_key=os.urandom(32), headers={"x-goog-foo": "bar"} + ) + def test_generate_signed_url_v2_w_credentials(self): credentials = object() self._generate_signed_url_v2_helper(credentials=credentials) @@ -566,6 +584,14 @@ def test_generate_signed_url_v4_w_generation(self): def test_generate_signed_url_v4_w_headers(self): self._generate_signed_url_v4_helper(headers={"x-goog-foo": "bar"}) + def test_generate_signed_url_v4_w_csek(self): + 
self._generate_signed_url_v4_helper(encryption_key=os.urandom(32)) + + def test_generate_signed_url_v4_w_csek_and_headers(self): + self._generate_signed_url_v4_helper( + encryption_key=os.urandom(32), headers={"x-goog-foo": "bar"} + ) + def test_generate_signed_url_v4_w_credentials(self): credentials = object() self._generate_signed_url_v4_helper(credentials=credentials) @@ -698,7 +724,7 @@ def test__get_download_url_on_the_fly(self): self.assertIsNone(blob.media_link) download_url = blob._get_download_url() expected_url = ( - "https://www.googleapis.com/download/storage/v1/b/" + "https://storage.googleapis.com/download/storage/v1/b/" "buhkit/o/bzzz-fly.txt?alt=media" ) self.assertEqual(download_url, expected_url) @@ -714,7 +740,7 @@ def test__get_download_url_on_the_fly_with_generation(self): self.assertIsNone(blob.media_link) download_url = blob._get_download_url() expected_url = ( - "https://www.googleapis.com/download/storage/v1/b/" + "https://storage.googleapis.com/download/storage/v1/b/" "fictional/o/pretend.txt?alt=media&generation=1493058489532987" ) self.assertEqual(download_url, expected_url) @@ -728,7 +754,7 @@ def test__get_download_url_on_the_fly_with_user_project(self): self.assertIsNone(blob.media_link) download_url = blob._get_download_url() expected_url = ( - "https://www.googleapis.com/download/storage/v1/b/" + "https://storage.googleapis.com/download/storage/v1/b/" "fictional/o/pretend.txt?alt=media&userProject={}".format(user_project) ) self.assertEqual(download_url, expected_url) @@ -747,311 +773,246 @@ def test__get_download_url_on_the_fly_with_kms_key_name(self): self.assertIsNone(blob.media_link) download_url = blob._get_download_url() expected_url = ( - "https://www.googleapis.com/download/storage/v1/b/" + "https://storage.googleapis.com/download/storage/v1/b/" "buhkit/o/bzzz-fly.txt?alt=media" ) self.assertEqual(download_url, expected_url) @staticmethod - def _mock_requests_response(status_code, headers, content=b"", stream=False): + def 
_mock_requests_response(status_code, headers, content=b""): import requests response = requests.Response() response.status_code = status_code response.headers.update(headers) - if stream: - raw = io.BytesIO(content) - raw.headers = headers - response.raw = raw - response._content = False - else: - response.raw = None - response._content = content + response.raw = None + response._content = content response.request = requests.Request("POST", "http://example.com").prepare() return response - def _mock_download_transport(self): - fake_transport = mock.Mock(spec=["request"]) - # Give the transport two fake responses. - chunk1_response = self._mock_requests_response( - http_client.PARTIAL_CONTENT, - {"content-length": "3", "content-range": "bytes 0-2/6"}, - content=b"abc", - ) - chunk2_response = self._mock_requests_response( - http_client.PARTIAL_CONTENT, - {"content-length": "3", "content-range": "bytes 3-5/6"}, - content=b"def", - ) - fake_transport.request.side_effect = [chunk1_response, chunk2_response] - return fake_transport - - def _mock_download_transport_range(self): - fake_transport = mock.Mock(spec=["request"]) - # Give the transport two fake responses. - chunk1_response = self._mock_requests_response( - http_client.PARTIAL_CONTENT, - {"content-length": "2", "content-range": "bytes 1-2/6"}, - content=b"bc", - ) - chunk2_response = self._mock_requests_response( - http_client.PARTIAL_CONTENT, - {"content-length": "2", "content-range": "bytes 3-4/6"}, - content=b"de", - ) - fake_transport.request.side_effect = [chunk1_response, chunk2_response] - return fake_transport - - def _check_session_mocks(self, client, transport, expected_url, headers=None): - # Check that the transport was called exactly twice. - self.assertEqual(transport.request.call_count, 2) - if headers is None: - headers = {} - # NOTE: bytes=0-2 never shows up because the mock was called with - # **MUTABLE** headers and it was mutated before the - # second request. 
- headers["range"] = "bytes=3-5" - headers["accept-encoding"] = "gzip" - call = mock.call( - "GET", expected_url, data=None, headers=headers, timeout=mock.ANY - ) - self.assertEqual(transport.request.mock_calls, [call, call]) - - def test__do_download_simple(self): + def _do_download_helper_wo_chunks(self, w_range, raw_download): blob_name = "blob-name" - # Create a fake client/bucket and use them in the Blob() constructor. - client = mock.Mock(_credentials=_make_credentials(), spec=["_credentials"]) + client = mock.Mock() bucket = _Bucket(client) blob = self._make_one(blob_name, bucket=bucket) - - # Make sure this will not be chunked. self.assertIsNone(blob.chunk_size) - transport = mock.Mock(spec=["request"]) - transport.request.return_value = self._mock_requests_response( - http_client.OK, - {"content-length": "6", "content-range": "bytes 0-5/6"}, - content=b"abcdef", - stream=True, - ) + transport = object() file_obj = io.BytesIO() download_url = "http://test.invalid" headers = {} - blob._do_download(transport, file_obj, download_url, headers) - # Make sure the download was as expected. - self.assertEqual(file_obj.getvalue(), b"abcdef") - transport.request.assert_called_once_with( - "GET", - download_url, - data=None, - headers=headers, - stream=True, - timeout=mock.ANY, - ) + if raw_download: + patch = mock.patch("google.cloud.storage.blob.RawDownload") + else: + patch = mock.patch("google.cloud.storage.blob.Download") + + with patch as patched: + if w_range: + blob._do_download( + transport, + file_obj, + download_url, + headers, + start=1, + end=3, + raw_download=raw_download, + ) + else: + blob._do_download( + transport, + file_obj, + download_url, + headers, + raw_download=raw_download, + ) - def test__do_download_simple_with_range(self): - blob_name = "blob-name" - # Create a fake client/bucket and use them in the Blob() constructor. 
- client = mock.Mock(_credentials=_make_credentials(), spec=["_credentials"]) - bucket = _Bucket(client) - blob = self._make_one(blob_name, bucket=bucket) + if w_range: + patched.assert_called_once_with( + download_url, stream=file_obj, headers=headers, start=1, end=3 + ) + else: + patched.assert_called_once_with( + download_url, stream=file_obj, headers=headers, start=None, end=None + ) + patched.return_value.consume.assert_called_once_with(transport) - # Make sure this will not be chunked. - self.assertIsNone(blob.chunk_size) + def test__do_download_wo_chunks_wo_range_wo_raw(self): + self._do_download_helper_wo_chunks(w_range=False, raw_download=False) - transport = mock.Mock(spec=["request"]) - transport.request.return_value = self._mock_requests_response( - http_client.OK, - {"content-length": "3", "content-range": "bytes 1-3"}, - content=b"bcd", - stream=True, - ) - file_obj = io.BytesIO() - download_url = "http://test.invalid" - headers = {} - blob._do_download(transport, file_obj, download_url, headers, start=1, end=3) - # Make sure the download was as expected. - self.assertEqual(file_obj.getvalue(), b"bcd") - self.assertEqual(headers["range"], "bytes=1-3") + def test__do_download_wo_chunks_w_range_wo_raw(self): + self._do_download_helper_wo_chunks(w_range=True, raw_download=False) - transport.request.assert_called_once_with( - "GET", - download_url, - data=None, - headers=headers, - stream=True, - timeout=mock.ANY, - ) + def test__do_download_wo_chunks_wo_range_w_raw(self): + self._do_download_helper_wo_chunks(w_range=False, raw_download=True) + + def test__do_download_wo_chunks_w_range_w_raw(self): + self._do_download_helper_wo_chunks(w_range=True, raw_download=True) - def test__do_download_chunked(self): + def _do_download_helper_w_chunks(self, w_range, raw_download): blob_name = "blob-name" - # Create a fake client/bucket and use them in the Blob() constructor. 
client = mock.Mock(_credentials=_make_credentials(), spec=["_credentials"]) bucket = _Bucket(client) blob = self._make_one(blob_name, bucket=bucket) - - # Modify the blob so there there will be 2 chunks of size 3. blob._CHUNK_SIZE_MULTIPLE = 1 - blob.chunk_size = 3 + chunk_size = blob.chunk_size = 3 - transport = self._mock_download_transport() + transport = object() file_obj = io.BytesIO() download_url = "http://test.invalid" headers = {} - blob._do_download(transport, file_obj, download_url, headers) - # Make sure the download was as expected. - self.assertEqual(file_obj.getvalue(), b"abcdef") - # Check that the transport was called exactly twice. - self.assertEqual(transport.request.call_count, 2) - # ``headers`` was modified (in place) once for each API call. - self.assertEqual(headers, {"range": "bytes=3-5"}) - call = mock.call( - "GET", download_url, data=None, headers=headers, timeout=mock.ANY - ) - self.assertEqual(transport.request.mock_calls, [call, call]) + download = mock.Mock(finished=False, spec=["finished", "consume_next_chunk"]) - def test__do_download_chunked_with_range(self): - blob_name = "blob-name" - # Create a fake client/bucket and use them in the Blob() constructor. - client = mock.Mock(_credentials=_make_credentials(), spec=["_credentials"]) - bucket = _Bucket(client) - blob = self._make_one(blob_name, bucket=bucket) + def side_effect(_): + download.finished = True - # Modify the blob so there there will be 2 chunks of size 2. - blob._CHUNK_SIZE_MULTIPLE = 1 - blob.chunk_size = 2 + download.consume_next_chunk.side_effect = side_effect - transport = self._mock_download_transport_range() - file_obj = io.BytesIO() - download_url = "http://test.invalid" - headers = {} - blob._do_download(transport, file_obj, download_url, headers, start=1, end=4) - # Make sure the download was as expected. 
- self.assertEqual(file_obj.getvalue(), b"bcde") + if raw_download: + patch = mock.patch("google.cloud.storage.blob.RawChunkedDownload") + else: + patch = mock.patch("google.cloud.storage.blob.ChunkedDownload") + + with patch as patched: + patched.return_value = download + if w_range: + blob._do_download( + transport, + file_obj, + download_url, + headers, + start=1, + end=3, + raw_download=raw_download, + ) + else: + blob._do_download( + transport, + file_obj, + download_url, + headers, + raw_download=raw_download, + ) - # Check that the transport was called exactly twice. - self.assertEqual(transport.request.call_count, 2) - # ``headers`` was modified (in place) once for each API call. - self.assertEqual(headers, {"range": "bytes=3-4"}) - call = mock.call( - "GET", download_url, data=None, headers=headers, timeout=mock.ANY - ) - self.assertEqual(transport.request.mock_calls, [call, call]) + if w_range: + patched.assert_called_once_with( + download_url, chunk_size, file_obj, headers=headers, start=1, end=3 + ) + else: + patched.assert_called_once_with( + download_url, chunk_size, file_obj, headers=headers, start=0, end=None + ) + download.consume_next_chunk.assert_called_once_with(transport) + + def test__do_download_w_chunks_wo_range_wo_raw(self): + self._do_download_helper_w_chunks(w_range=False, raw_download=False) + + def test__do_download_w_chunks_w_range_wo_raw(self): + self._do_download_helper_w_chunks(w_range=True, raw_download=False) + + def test__do_download_w_chunks_wo_range_w_raw(self): + self._do_download_helper_w_chunks(w_range=False, raw_download=True) + + def test__do_download_w_chunks_w_range_w_raw(self): + self._do_download_helper_w_chunks(w_range=True, raw_download=True) def test_download_to_file_with_failure(self): + import requests + from google.resumable_media import InvalidResponse from google.cloud import exceptions + raw_response = requests.Response() + raw_response.status_code = http_client.NOT_FOUND + raw_request = 
requests.Request("GET", "http://example.com") + raw_response.request = raw_request.prepare() + grmp_response = InvalidResponse(raw_response) + blob_name = "blob-name" - transport = mock.Mock(spec=["request"]) - bad_response_headers = { - "Content-Length": "9", - "Content-Type": "text/html; charset=UTF-8", - } - transport.request.return_value = self._mock_requests_response( - http_client.NOT_FOUND, bad_response_headers, content=b"Not found" - ) - # Create a fake client/bucket and use them in the Blob() constructor. - client = mock.Mock(_http=transport, spec=[u"_http"]) + media_link = "http://test.invalid" + client = mock.Mock(spec=[u"_http"]) bucket = _Bucket(client) blob = self._make_one(blob_name, bucket=bucket) - # Set the media link on the blob - blob._properties["mediaLink"] = "http://test.invalid" + blob._properties["mediaLink"] = media_link + blob._do_download = mock.Mock() + blob._do_download.side_effect = grmp_response file_obj = io.BytesIO() with self.assertRaises(exceptions.NotFound): blob.download_to_file(file_obj) self.assertEqual(file_obj.tell(), 0) - # Check that the transport was called once. - transport.request.assert_called_once_with( - "GET", - blob.media_link, - data=None, - headers={"accept-encoding": "gzip"}, - stream=True, - timeout=mock.ANY, + + headers = {"accept-encoding": "gzip"} + blob._do_download.assert_called_once_with( + client._http, file_obj, media_link, headers, None, None, False ) def test_download_to_file_wo_media_link(self): blob_name = "blob-name" - transport = self._mock_download_transport() - # Create a fake client/bucket and use them in the Blob() constructor. - client = mock.Mock(_http=transport, spec=[u"_http"]) + client = mock.Mock(spec=[u"_http"]) bucket = _Bucket(client) blob = self._make_one(blob_name, bucket=bucket) - # Modify the blob so there there will be 2 chunks of size 3. 
- blob._CHUNK_SIZE_MULTIPLE = 1 - blob.chunk_size = 3 - + blob._do_download = mock.Mock() file_obj = io.BytesIO() + blob.download_to_file(file_obj) - self.assertEqual(file_obj.getvalue(), b"abcdef") + # Make sure the media link is still unknown. self.assertIsNone(blob.media_link) expected_url = ( - "https://www.googleapis.com/download/storage/v1/b/" + "https://storage.googleapis.com/download/storage/v1/b/" "name/o/blob-name?alt=media" ) - self._check_session_mocks(client, transport, expected_url) + headers = {"accept-encoding": "gzip"} + blob._do_download.assert_called_once_with( + client._http, file_obj, expected_url, headers, None, None, False + ) - def _download_to_file_helper(self, use_chunks=False): + def _download_to_file_helper(self, use_chunks, raw_download): blob_name = "blob-name" - transport = self._mock_download_transport() - # Create a fake client/bucket and use them in the Blob() constructor. - client = mock.Mock(_http=transport, spec=[u"_http"]) + client = mock.Mock(spec=[u"_http"]) bucket = _Bucket(client) media_link = "http://example.com/media/" properties = {"mediaLink": media_link} blob = self._make_one(blob_name, bucket=bucket, properties=properties) if use_chunks: - # Modify the blob so there there will be 2 chunks of size 3. blob._CHUNK_SIZE_MULTIPLE = 1 blob.chunk_size = 3 - else: - # Modify the response. 
- single_chunk_response = self._mock_requests_response( - http_client.OK, - {"content-length": "6", "content-range": "bytes 0-5/6"}, - content=b"abcdef", - stream=True, - ) - transport.request.side_effect = [single_chunk_response] + blob._do_download = mock.Mock() file_obj = io.BytesIO() - blob.download_to_file(file_obj) - self.assertEqual(file_obj.getvalue(), b"abcdef") - - if use_chunks: - self._check_session_mocks(client, transport, media_link) + if raw_download: + blob.download_to_file(file_obj, raw_download=True) else: - transport.request.assert_called_once_with( - "GET", - media_link, - data=None, - headers={"accept-encoding": "gzip"}, - stream=True, - timeout=mock.ANY, - ) + blob.download_to_file(file_obj) - def test_download_to_file_default(self): - self._download_to_file_helper() + headers = {"accept-encoding": "gzip"} + blob._do_download.assert_called_once_with( + client._http, file_obj, media_link, headers, None, None, raw_download + ) + + def test_download_to_file_wo_chunks_wo_raw(self): + self._download_to_file_helper(use_chunks=False, raw_download=False) + + def test_download_to_file_w_chunks_wo_raw(self): + self._download_to_file_helper(use_chunks=True, raw_download=False) - def test_download_to_file_with_chunk_size(self): - self._download_to_file_helper(use_chunks=True) + def test_download_to_file_wo_chunks_w_raw(self): + self._download_to_file_helper(use_chunks=False, raw_download=True) - def _download_to_filename_helper(self, updated=None): + def test_download_to_file_w_chunks_w_raw(self): + self._download_to_file_helper(use_chunks=True, raw_download=True) + + def _download_to_filename_helper(self, updated, raw_download): import os import time from google.cloud._testing import _NamedTemporaryFile blob_name = "blob-name" - transport = self._mock_download_transport() - # Create a fake client/bucket and use them in the Blob() constructor. 
- client = mock.Mock(_http=transport, spec=["_http"]) + client = mock.Mock(spec=["_http"]) bucket = _Bucket(client) media_link = "http://example.com/media/" properties = {"mediaLink": media_link} @@ -1059,162 +1020,122 @@ def _download_to_filename_helper(self, updated=None): properties["updated"] = updated blob = self._make_one(blob_name, bucket=bucket, properties=properties) - # Modify the blob so there there will be 2 chunks of size 3. - blob._CHUNK_SIZE_MULTIPLE = 1 - blob.chunk_size = 3 + blob._do_download = mock.Mock() with _NamedTemporaryFile() as temp: - blob.download_to_filename(temp.name) - with open(temp.name, "rb") as file_obj: - wrote = file_obj.read() - if updated is None: - self.assertIsNone(blob.updated) - else: - mtime = os.path.getmtime(temp.name) - updated_time = time.mktime(blob.updated.timetuple()) - self.assertEqual(mtime, updated_time) + blob.download_to_filename(temp.name, raw_download=raw_download) + if updated is None: + self.assertIsNone(blob.updated) + else: + mtime = os.path.getmtime(temp.name) + updated_time = time.mktime(blob.updated.timetuple()) + self.assertEqual(mtime, updated_time) - self.assertEqual(wrote, b"abcdef") + headers = {"accept-encoding": "gzip"} + blob._do_download.assert_called_once_with( + client._http, mock.ANY, media_link, headers, None, None, raw_download + ) + stream = blob._do_download.mock_calls[0].args[1] + self.assertEqual(stream.name, temp.name) - self._check_session_mocks(client, transport, media_link) + def test_download_to_filename_w_updated_wo_raw(self): + updated = "2014-12-06T13:13:50.690Z" + self._download_to_filename_helper(updated=updated, raw_download=False) - def test_download_to_filename(self): + def test_download_to_filename_wo_updated_wo_raw(self): + self._download_to_filename_helper(updated=None, raw_download=False) + + def test_download_to_filename_w_updated_w_raw(self): updated = "2014-12-06T13:13:50.690Z" - self._download_to_filename_helper(updated=updated) + 
self._download_to_filename_helper(updated=updated, raw_download=True) - def test_download_to_filename_wo_updated(self): - self._download_to_filename_helper() + def test_download_to_filename_wo_updated_w_raw(self): + self._download_to_filename_helper(updated=None, raw_download=True) def test_download_to_filename_corrupted(self): from google.resumable_media import DataCorruption - from google.resumable_media.requests.download import _CHECKSUM_MISMATCH blob_name = "blob-name" - transport = mock.Mock(spec=["request"]) - empty_hash = base64.b64encode(hashlib.md5(b"").digest()).decode(u"utf-8") - headers = {"x-goog-hash": "md5=" + empty_hash} - mock_raw = mock.Mock(headers=headers, spec=["headers"]) - response = mock.MagicMock( - headers=headers, - status_code=http_client.OK, - raw=mock_raw, - spec=[ - "__enter__", - "__exit__", - "headers", - "iter_content", - "status_code", - "raw", - ], - ) - # i.e. context manager returns ``self``. - response.__enter__.return_value = response - response.__exit__.return_value = None - chunks = (b"noms1", b"coooookies2") - response.iter_content.return_value = iter(chunks) - - transport.request.return_value = response - # Create a fake client/bucket and use them in the Blob() constructor. - client = mock.Mock(_http=transport, spec=["_http"]) - bucket = mock.Mock( - client=client, user_project=None, spec=["client", "user_project"] - ) + client = mock.Mock(spec=["_http"]) + bucket = _Bucket(client) media_link = "http://example.com/media/" properties = {"mediaLink": media_link} - blob = self._make_one(blob_name, bucket=bucket, properties=properties) - # Make sure the download is **not** chunked. - self.assertIsNone(blob.chunk_size) - # Make sure the hash will be wrong. 
- content = b"".join(chunks) - expected_hash = base64.b64encode(hashlib.md5(content).digest()).decode(u"utf-8") - self.assertNotEqual(empty_hash, expected_hash) + blob = self._make_one(blob_name, bucket=bucket, properties=properties) + blob._do_download = mock.Mock() + blob._do_download.side_effect = DataCorruption("testing") # Try to download into a temporary file (don't use # `_NamedTemporaryFile` it will try to remove after the file is # already removed) filehandle, filename = tempfile.mkstemp() os.close(filehandle) - with self.assertRaises(DataCorruption) as exc_info: + self.assertTrue(os.path.exists(filename)) + + with self.assertRaises(DataCorruption): blob.download_to_filename(filename) - msg = _CHECKSUM_MISMATCH.format(media_link, empty_hash, expected_hash) - self.assertEqual(exc_info.exception.args, (msg,)) # Make sure the file was cleaned up. self.assertFalse(os.path.exists(filename)) - # Check the mocks. - response.__enter__.assert_called_once_with() - response.__exit__.assert_called_once_with(None, None, None) - response.iter_content.assert_called_once_with( - chunk_size=8192, decode_unicode=False - ) - transport.request.assert_called_once_with( - "GET", - media_link, - data=None, - headers={"accept-encoding": "gzip"}, - stream=True, - timeout=mock.ANY, + headers = {"accept-encoding": "gzip"} + blob._do_download.assert_called_once_with( + client._http, mock.ANY, media_link, headers, None, None, False ) + stream = blob._do_download.mock_calls[0].args[1] + self.assertEqual(stream.name, filename) def test_download_to_filename_w_key(self): - import os - import time from google.cloud._testing import _NamedTemporaryFile + from google.cloud.storage.blob import _get_encryption_headers blob_name = "blob-name" - transport = self._mock_download_transport() # Create a fake client/bucket and use them in the Blob() constructor. 
- client = mock.Mock(_http=transport, spec=["_http"]) + client = mock.Mock(spec=["_http"]) bucket = _Bucket(client) media_link = "http://example.com/media/" - properties = {"mediaLink": media_link, "updated": "2014-12-06T13:13:50.690Z"} + properties = {"mediaLink": media_link} key = b"aa426195405adee2c8081bb9e7e74b19" blob = self._make_one( blob_name, bucket=bucket, properties=properties, encryption_key=key ) - # Modify the blob so there there will be 2 chunks of size 3. - blob._CHUNK_SIZE_MULTIPLE = 1 - blob.chunk_size = 3 + blob._do_download = mock.Mock() with _NamedTemporaryFile() as temp: blob.download_to_filename(temp.name) - with open(temp.name, "rb") as file_obj: - wrote = file_obj.read() - mtime = os.path.getmtime(temp.name) - updated_time = time.mktime(blob.updated.timetuple()) - self.assertEqual(wrote, b"abcdef") - self.assertEqual(mtime, updated_time) - - header_key_value = "YWE0MjYxOTU0MDVhZGVlMmM4MDgxYmI5ZTdlNzRiMTk=" - header_key_hash_value = "V3Kwe46nKc3xLv96+iJ707YfZfFvlObta8TQcx2gpm0=" - key_headers = { - "X-Goog-Encryption-Key-Sha256": header_key_hash_value, - "X-Goog-Encryption-Algorithm": "AES256", - "X-Goog-Encryption-Key": header_key_value, - "accept-encoding": "gzip", - } - self._check_session_mocks(client, transport, media_link, headers=key_headers) + headers = {"accept-encoding": "gzip"} + headers.update(_get_encryption_headers(key)) + blob._do_download.assert_called_once_with( + client._http, mock.ANY, media_link, headers, None, None, False + ) + stream = blob._do_download.mock_calls[0].args[1] + self.assertEqual(stream.name, temp.name) - def test_download_as_string(self): + def _download_as_string_helper(self, raw_download): blob_name = "blob-name" - transport = self._mock_download_transport() - # Create a fake client/bucket and use them in the Blob() constructor. 
- client = mock.Mock(_http=transport, spec=["_http"]) + client = mock.Mock(spec=["_http"]) bucket = _Bucket(client) media_link = "http://example.com/media/" properties = {"mediaLink": media_link} blob = self._make_one(blob_name, bucket=bucket, properties=properties) - # Modify the blob so there there will be 2 chunks of size 3. - blob._CHUNK_SIZE_MULTIPLE = 1 - blob.chunk_size = 3 + blob._do_download = mock.Mock() + + fetched = blob.download_as_string(raw_download=raw_download) + self.assertEqual(fetched, b"") + + headers = {"accept-encoding": "gzip"} + blob._do_download.assert_called_once_with( + client._http, mock.ANY, media_link, headers, None, None, raw_download + ) + stream = blob._do_download.mock_calls[0].args[1] + self.assertIsInstance(stream, io.BytesIO) - fetched = blob.download_as_string() - self.assertEqual(fetched, b"abcdef") + def test_download_as_string_wo_raw(self): + self._download_as_string_helper(raw_download=False) - self._check_session_mocks(client, transport, media_link) + def test_download_as_string_w_raw(self): + self._download_as_string_helper(raw_download=True) def test__get_content_type_explicit(self): blob = self._make_one(u"blob-name", bucket=None) @@ -1348,7 +1269,9 @@ def _do_multipart_success( mock_get_boundary.assert_called_once_with() - upload_url = "https://www.googleapis.com/upload/storage/v1" + bucket.path + "/o" + upload_url = ( + "https://storage.googleapis.com/upload/storage/v1" + bucket.path + "/o" + ) qs_params = [("uploadType", "multipart")] @@ -1432,6 +1355,7 @@ def _initiate_resumable_helper( ): from six.moves.urllib.parse import urlencode from google.resumable_media.requests import ResumableUpload + from google.cloud.storage.blob import _DEFAULT_CHUNKSIZE bucket = _Bucket(name="whammy", user_project=user_project) blob = self._make_one(u"blob-name", bucket=bucket, kms_key_name=kms_key_name) @@ -1471,7 +1395,9 @@ def _initiate_resumable_helper( # Check the returned values. 
self.assertIsInstance(upload, ResumableUpload) - upload_url = "https://www.googleapis.com/upload/storage/v1" + bucket.path + "/o" + upload_url = ( + "https://storage.googleapis.com/upload/storage/v1" + bucket.path + "/o" + ) qs_params = [("uploadType", "resumable")] if user_project is not None: @@ -1494,9 +1420,7 @@ def _initiate_resumable_helper( self.assertFalse(upload.finished) if chunk_size is None: if blob_chunk_size is None: - self.assertEqual( - upload._chunk_size, google.cloud.storage.blob._DEFAULT_CHUNKSIZE - ) + self.assertEqual(upload._chunk_size, _DEFAULT_CHUNKSIZE) else: self.assertEqual(upload._chunk_size, blob.chunk_size) else: @@ -1594,7 +1518,7 @@ def _make_resumable_transport(self, headers1, headers2, headers3, total_bytes): def _do_resumable_upload_call0(blob, content_type, size=None, predefined_acl=None): # First mock transport.request() does initiates upload. upload_url = ( - "https://www.googleapis.com/upload/storage/v1" + "https://storage.googleapis.com/upload/storage/v1" + blob.bucket.path + "/o?uploadType=resumable" ) @@ -1728,6 +1652,8 @@ def test__do_resumable_upload_with_predefined_acl(self): def _do_upload_helper( self, chunk_size=None, num_retries=None, predefined_acl=None, size=None ): + from google.cloud.storage.blob import _MAX_MULTIPART_SIZE + blob = self._make_one(u"blob-name", bucket=None) # Create a fake response. 
@@ -1754,7 +1680,7 @@ def _do_upload_helper( ) self.assertIs(created_json, mock.sentinel.json) response.json.assert_called_once_with() - if size is not None and size <= google.cloud.storage.blob._MAX_MULTIPART_SIZE: + if size is not None and size <= _MAX_MULTIPART_SIZE: blob._do_multipart_upload.assert_called_once_with( client, stream, content_type, size, num_retries, predefined_acl ) @@ -1766,13 +1692,15 @@ def _do_upload_helper( ) def test__do_upload_uses_multipart(self): - self._do_upload_helper(size=google.cloud.storage.blob._MAX_MULTIPART_SIZE) + from google.cloud.storage.blob import _MAX_MULTIPART_SIZE + + self._do_upload_helper(size=_MAX_MULTIPART_SIZE) def test__do_upload_uses_resumable(self): - self._do_upload_helper( - chunk_size=256 * 1024, # 256KB - size=google.cloud.storage.blob._MAX_MULTIPART_SIZE + 1, - ) + from google.cloud.storage.blob import _MAX_MULTIPART_SIZE + + chunk_size = 256 * 1024 # 256KB + self._do_upload_helper(chunk_size=chunk_size, size=_MAX_MULTIPART_SIZE + 1) def test__do_upload_with_retry(self): self._do_upload_helper(num_retries=20) @@ -1953,7 +1881,7 @@ def _create_resumable_upload_session_helper(self, origin=None, side_effect=None) # Check the mocks. 
upload_url = ( - "https://www.googleapis.com/upload/storage/v1" + "https://storage.googleapis.com/upload/storage/v1" + bucket.path + "/o?uploadType=resumable" ) @@ -2496,9 +2424,6 @@ def test_rewrite_other_bucket_other_name_no_encryption_partial(self): self.assertNotIn("X-Goog-Encryption-Key-Sha256", headers) def test_rewrite_same_name_no_old_key_new_key_done_w_user_project(self): - import base64 - import hashlib - KEY = b"01234567890123456789012345678901" # 32 bytes KEY_B64 = base64.b64encode(KEY).rstrip().decode("ascii") KEY_HASH = hashlib.sha256(KEY).digest() @@ -2542,9 +2467,6 @@ def test_rewrite_same_name_no_old_key_new_key_done_w_user_project(self): self.assertEqual(headers["X-Goog-Encryption-Key-Sha256"], KEY_HASH_B64) def test_rewrite_same_name_no_key_new_key_w_token(self): - import base64 - import hashlib - SOURCE_KEY = b"01234567890123456789012345678901" # 32 bytes SOURCE_KEY_B64 = base64.b64encode(SOURCE_KEY).rstrip().decode("ascii") SOURCE_KEY_HASH = hashlib.sha256(SOURCE_KEY).digest() @@ -2594,9 +2516,6 @@ def test_rewrite_same_name_no_key_new_key_w_token(self): self.assertEqual(headers["X-Goog-Encryption-Key-Sha256"], DEST_KEY_HASH_B64) def test_rewrite_same_name_w_old_key_new_kms_key(self): - import base64 - import hashlib - SOURCE_KEY = b"01234567890123456789012345678901" # 32 bytes SOURCE_KEY_B64 = base64.b64encode(SOURCE_KEY).rstrip().decode("ascii") SOURCE_KEY_HASH = hashlib.sha256(SOURCE_KEY).digest() @@ -2718,9 +2637,6 @@ def test_update_storage_class_wo_encryption_key(self): self.assertNotIn("X-Goog-Encryption-Key-Sha256", headers) def test_update_storage_class_w_encryption_key_w_user_project(self): - import base64 - import hashlib - BLOB_NAME = "blob-name" BLOB_KEY = b"01234567890123456789012345678901" # 32 bytes BLOB_KEY_B64 = base64.b64encode(BLOB_KEY).rstrip().decode("ascii") @@ -3077,7 +2993,7 @@ def test_size_string_val(self): def test_storage_class_getter(self): blob_name = "blob-name" bucket = _Bucket() - storage_class = 
"MULTI_REGIONAL" + storage_class = "COLDLINE" properties = {"storageClass": storage_class} blob = self._make_one(blob_name, bucket=bucket, properties=properties) self.assertEqual(blob.storage_class, storage_class) diff --git a/storage/tests/unit/test_bucket.py b/storage/tests/unit/test_bucket.py index e99ac6356a96..2646e05838af 100644 --- a/storage/tests/unit/test_bucket.py +++ b/storage/tests/unit/test_bucket.py @@ -47,13 +47,13 @@ def test_ctor_wo_conditions(self): self._make_one() def test_ctor_w_age_and_matches_storage_class(self): - conditions = self._make_one(age=10, matches_storage_class=["REGIONAL"]) - expected = {"age": 10, "matchesStorageClass": ["REGIONAL"]} + conditions = self._make_one(age=10, matches_storage_class=["COLDLINE"]) + expected = {"age": 10, "matchesStorageClass": ["COLDLINE"]} self.assertEqual(dict(conditions), expected) self.assertEqual(conditions.age, 10) self.assertIsNone(conditions.created_before) self.assertIsNone(conditions.is_live) - self.assertEqual(conditions.matches_storage_class, ["REGIONAL"]) + self.assertEqual(conditions.matches_storage_class, ["COLDLINE"]) self.assertIsNone(conditions.number_of_newer_versions) def test_ctor_w_created_before_and_is_live(self): @@ -88,14 +88,14 @@ def test_from_api_repr(self): "age": 10, "createdBefore": "2018-08-01", "isLive": True, - "matchesStorageClass": ["REGIONAL"], + "matchesStorageClass": ["COLDLINE"], "numNewerVersions": 3, } conditions = klass.from_api_repr(resource) self.assertEqual(conditions.age, 10) self.assertEqual(conditions.created_before, before) self.assertEqual(conditions.is_live, True) - self.assertEqual(conditions.matches_storage_class, ["REGIONAL"]) + self.assertEqual(conditions.matches_storage_class, ["COLDLINE"]) self.assertEqual(conditions.number_of_newer_versions, 3) @@ -114,10 +114,10 @@ def test_ctor_wo_conditions(self): self._make_one() def test_ctor_w_condition(self): - rule = self._make_one(age=10, matches_storage_class=["REGIONAL"]) + rule = 
self._make_one(age=10, matches_storage_class=["COLDLINE"]) expected = { "action": {"type": "Delete"}, - "condition": {"age": 10, "matchesStorageClass": ["REGIONAL"]}, + "condition": {"age": 10, "matchesStorageClass": ["COLDLINE"]}, } self.assertEqual(dict(rule), expected) @@ -127,7 +127,7 @@ def test_from_api_repr(self): "age": 10, "createdBefore": "2018-08-01", "isLive": True, - "matchesStorageClass": ["REGIONAL"], + "matchesStorageClass": ["COLDLINE"], "numNewerVersions": 3, } resource = {"action": {"type": "Delete"}, "condition": conditions} @@ -147,15 +147,15 @@ def _make_one(self, **kw): def test_ctor_wo_conditions(self): with self.assertRaises(ValueError): - self._make_one(storage_class="REGIONAL") + self._make_one(storage_class="COLDLINE") def test_ctor_w_condition(self): rule = self._make_one( - storage_class="NEARLINE", age=10, matches_storage_class=["REGIONAL"] + storage_class="COLDLINE", age=10, matches_storage_class=["NEARLINE"] ) expected = { - "action": {"type": "SetStorageClass", "storageClass": "NEARLINE"}, - "condition": {"age": 10, "matchesStorageClass": ["REGIONAL"]}, + "action": {"type": "SetStorageClass", "storageClass": "COLDLINE"}, + "condition": {"age": 10, "matchesStorageClass": ["NEARLINE"]}, } self.assertEqual(dict(rule), expected) @@ -165,11 +165,11 @@ def test_from_api_repr(self): "age": 10, "createdBefore": "2018-08-01", "isLive": True, - "matchesStorageClass": ["REGIONAL"], + "matchesStorageClass": ["NEARLINE"], "numNewerVersions": 3, } resource = { - "action": {"type": "SetStorageClass", "storageClass": "NEARLINE"}, + "action": {"type": "SetStorageClass", "storageClass": "COLDLINE"}, "condition": conditions, } rule = klass.from_api_repr(resource) @@ -198,10 +198,12 @@ def test_ctor_defaults(self): config = self._make_one(bucket) self.assertIs(config.bucket, bucket) + self.assertFalse(config.uniform_bucket_level_access_enabled) + self.assertIsNone(config.uniform_bucket_level_access_locked_time) 
self.assertFalse(config.bucket_policy_only_enabled) self.assertIsNone(config.bucket_policy_only_locked_time) - def test_ctor_explicit(self): + def test_ctor_explicit_ubla(self): import datetime import pytz @@ -209,13 +211,62 @@ def test_ctor_explicit(self): now = datetime.datetime.utcnow().replace(tzinfo=pytz.UTC) config = self._make_one( - bucket, bucket_policy_only_enabled=True, bucket_policy_only_locked_time=now + bucket, + uniform_bucket_level_access_enabled=True, + uniform_bucket_level_access_locked_time=now, ) self.assertIs(config.bucket, bucket) + self.assertTrue(config.uniform_bucket_level_access_enabled) + self.assertEqual(config.uniform_bucket_level_access_locked_time, now) self.assertTrue(config.bucket_policy_only_enabled) self.assertEqual(config.bucket_policy_only_locked_time, now) + def test_ctor_explicit_bpo(self): + import datetime + import pytz + + bucket = self._make_bucket() + now = datetime.datetime.utcnow().replace(tzinfo=pytz.UTC) + + config = pytest.deprecated_call( + self._make_one, + bucket, + bucket_policy_only_enabled=True, + bucket_policy_only_locked_time=now, + ) + + self.assertIs(config.bucket, bucket) + self.assertTrue(config.uniform_bucket_level_access_enabled) + self.assertEqual(config.uniform_bucket_level_access_locked_time, now) + self.assertTrue(config.bucket_policy_only_enabled) + self.assertEqual(config.bucket_policy_only_locked_time, now) + + def test_ctor_ubla_and_bpo_enabled(self): + bucket = self._make_bucket() + + with self.assertRaises(ValueError): + self._make_one( + bucket, + uniform_bucket_level_access_enabled=True, + bucket_policy_only_enabled=True, + ) + + def test_ctor_ubla_and_bpo_time(self): + import datetime + import pytz + + bucket = self._make_bucket() + now = datetime.datetime.utcnow().replace(tzinfo=pytz.UTC) + + with self.assertRaises(ValueError): + self._make_one( + bucket, + uniform_bucket_level_access_enabled=True, + uniform_bucket_level_access_locked_time=now, + bucket_policy_only_locked_time=now, + ) + 
def test_from_api_repr_w_empty_resource(self): klass = self._get_target_class() bucket = self._make_bucket() @@ -230,7 +281,7 @@ def test_from_api_repr_w_empty_resource(self): def test_from_api_repr_w_empty_bpo(self): klass = self._get_target_class() bucket = self._make_bucket() - resource = {"bucketPolicyOnly": {}} + resource = {"uniformBucketLevelAccess": {}} config = klass.from_api_repr(resource, bucket) @@ -241,7 +292,7 @@ def test_from_api_repr_w_empty_bpo(self): def test_from_api_repr_w_disabled(self): klass = self._get_target_class() bucket = self._make_bucket() - resource = {"bucketPolicyOnly": {"enabled": False}} + resource = {"uniformBucketLevelAccess": {"enabled": False}} config = klass.from_api_repr(resource, bucket) @@ -258,7 +309,7 @@ def test_from_api_repr_w_enabled(self): bucket = self._make_bucket() now = datetime.datetime.utcnow().replace(tzinfo=pytz.UTC) resource = { - "bucketPolicyOnly": { + "uniformBucketLevelAccess": { "enabled": True, "lockedTime": _datetime_to_rfc3339(now), } @@ -267,16 +318,30 @@ def test_from_api_repr_w_enabled(self): config = klass.from_api_repr(resource, bucket) self.assertIs(config.bucket, bucket) + self.assertTrue(config.uniform_bucket_level_access_enabled) + self.assertEqual(config.uniform_bucket_level_access_locked_time, now) self.assertTrue(config.bucket_policy_only_enabled) self.assertEqual(config.bucket_policy_only_locked_time, now) + def test_uniform_bucket_level_access_enabled_setter(self): + bucket = self._make_bucket() + config = self._make_one(bucket) + + config.uniform_bucket_level_access_enabled = True + self.assertTrue(config.bucket_policy_only_enabled) + + self.assertTrue(config["uniformBucketLevelAccess"]["enabled"]) + bucket._patch_property.assert_called_once_with("iamConfiguration", config) + def test_bucket_policy_only_enabled_setter(self): bucket = self._make_bucket() config = self._make_one(bucket) - config.bucket_policy_only_enabled = True + with pytest.deprecated_call(): + 
config.bucket_policy_only_enabled = True - self.assertTrue(config["bucketPolicyOnly"]["enabled"]) + self.assertTrue(config.uniform_bucket_level_access_enabled) + self.assertTrue(config["uniformBucketLevelAccess"]["enabled"]) bucket._patch_property.assert_called_once_with("iamConfiguration", config) @@ -646,6 +711,60 @@ def test_create_w_extra_properties(self): self.assertEqual(kw["query_params"], {"project": PROJECT}) self.assertEqual(kw["data"], DATA) + def test_create_w_predefined_acl_invalid(self): + PROJECT = "PROJECT" + BUCKET_NAME = "bucket-name" + DATA = {"name": BUCKET_NAME} + connection = _Connection(DATA) + client = _Client(connection, project=PROJECT) + bucket = self._make_one(client=client, name=BUCKET_NAME) + + with self.assertRaises(ValueError): + bucket.create(predefined_acl="bogus") + + def test_create_w_predefined_acl_valid(self): + PROJECT = "PROJECT" + BUCKET_NAME = "bucket-name" + DATA = {"name": BUCKET_NAME} + connection = _Connection(DATA) + client = _Client(connection, project=PROJECT) + bucket = self._make_one(client=client, name=BUCKET_NAME) + bucket.create(predefined_acl="publicRead") + + kw, = connection._requested + self.assertEqual(kw["method"], "POST") + self.assertEqual(kw["path"], "/b") + expected_qp = {"project": PROJECT, "predefinedAcl": "publicRead"} + self.assertEqual(kw["query_params"], expected_qp) + self.assertEqual(kw["data"], DATA) + + def test_create_w_predefined_default_object_acl_invalid(self): + PROJECT = "PROJECT" + BUCKET_NAME = "bucket-name" + DATA = {"name": BUCKET_NAME} + connection = _Connection(DATA) + client = _Client(connection, project=PROJECT) + bucket = self._make_one(client=client, name=BUCKET_NAME) + + with self.assertRaises(ValueError): + bucket.create(predefined_default_object_acl="bogus") + + def test_create_w_predefined_default_object_acl_valid(self): + PROJECT = "PROJECT" + BUCKET_NAME = "bucket-name" + DATA = {"name": BUCKET_NAME} + connection = _Connection(DATA) + client = _Client(connection, 
project=PROJECT) + bucket = self._make_one(client=client, name=BUCKET_NAME) + bucket.create(predefined_default_object_acl="publicRead") + + kw, = connection._requested + self.assertEqual(kw["method"], "POST") + self.assertEqual(kw["path"], "/b") + expected_qp = {"project": PROJECT, "predefinedDefaultObjectAcl": "publicRead"} + self.assertEqual(kw["query_params"], expected_qp) + self.assertEqual(kw["data"], DATA) + def test_acl_property(self): from google.cloud.storage.acl import BucketACL @@ -1263,7 +1382,7 @@ def test_iam_configuration_policy_w_entry(self): NAME = "name" properties = { "iamConfiguration": { - "bucketPolicyOnly": { + "uniformBucketLevelAccess": { "enabled": True, "lockedTime": _datetime_to_rfc3339(now), } @@ -1275,8 +1394,8 @@ def test_iam_configuration_policy_w_entry(self): self.assertIsInstance(config, IAMConfiguration) self.assertIs(config.bucket, bucket) - self.assertTrue(config.bucket_policy_only_enabled) - self.assertEqual(config.bucket_policy_only_locked_time, now) + self.assertTrue(config.uniform_bucket_level_access_enabled) + self.assertEqual(config.uniform_bucket_level_access_locked_time, now) def test_lifecycle_rules_getter_unknown_action_type(self): NAME = "name" @@ -1511,10 +1630,11 @@ def test_location_type_getter_unset(self): self.assertIsNone(bucket.location_type) def test_location_type_getter_set(self): - klass = self._get_target_class() - properties = {"locationType": klass.REGION_LOCATION_TYPE} + from google.cloud.storage.constants import REGION_LOCATION_TYPE + + properties = {"locationType": REGION_LOCATION_TYPE} bucket = self._make_one(properties=properties) - self.assertEqual(bucket.location_type, klass.REGION_LOCATION_TYPE) + self.assertEqual(bucket.location_type, REGION_LOCATION_TYPE) def test_get_logging_w_prefix(self): NAME = "name" @@ -1678,10 +1798,11 @@ def test_self_link(self): self.assertEqual(bucket.self_link, SELF_LINK) def test_storage_class_getter(self): - klass = self._get_target_class() - properties = 
{"storageClass": klass.NEARLINE_STORAGE_CLASS} + from google.cloud.storage.constants import NEARLINE_STORAGE_CLASS + + properties = {"storageClass": NEARLINE_STORAGE_CLASS} bucket = self._make_one(properties=properties) - self.assertEqual(bucket.storage_class, klass.NEARLINE_STORAGE_CLASS) + self.assertEqual(bucket.storage_class, NEARLINE_STORAGE_CLASS) def test_storage_class_setter_invalid(self): NAME = "name" @@ -1691,55 +1812,60 @@ def test_storage_class_setter_invalid(self): self.assertFalse("storageClass" in bucket._changes) def test_storage_class_setter_STANDARD(self): - klass = self._get_target_class() + from google.cloud.storage.constants import STANDARD_STORAGE_CLASS + NAME = "name" bucket = self._make_one(name=NAME) - bucket.storage_class = klass.STANDARD_STORAGE_CLASS - self.assertEqual(bucket.storage_class, klass.STANDARD_STORAGE_CLASS) + bucket.storage_class = STANDARD_STORAGE_CLASS + self.assertEqual(bucket.storage_class, STANDARD_STORAGE_CLASS) self.assertTrue("storageClass" in bucket._changes) def test_storage_class_setter_NEARLINE(self): - klass = self._get_target_class() + from google.cloud.storage.constants import NEARLINE_STORAGE_CLASS + NAME = "name" bucket = self._make_one(name=NAME) - bucket.storage_class = klass.NEARLINE_STORAGE_CLASS - self.assertEqual(bucket.storage_class, klass.NEARLINE_STORAGE_CLASS) + bucket.storage_class = NEARLINE_STORAGE_CLASS + self.assertEqual(bucket.storage_class, NEARLINE_STORAGE_CLASS) self.assertTrue("storageClass" in bucket._changes) def test_storage_class_setter_COLDLINE(self): - klass = self._get_target_class() + from google.cloud.storage.constants import COLDLINE_STORAGE_CLASS + NAME = "name" bucket = self._make_one(name=NAME) - bucket.storage_class = klass.COLDLINE_STORAGE_CLASS - self.assertEqual(bucket.storage_class, klass.COLDLINE_STORAGE_CLASS) + bucket.storage_class = COLDLINE_STORAGE_CLASS + self.assertEqual(bucket.storage_class, COLDLINE_STORAGE_CLASS) self.assertTrue("storageClass" in 
bucket._changes) def test_storage_class_setter_MULTI_REGIONAL(self): - klass = self._get_target_class() + from google.cloud.storage.constants import MULTI_REGIONAL_LEGACY_STORAGE_CLASS + NAME = "name" bucket = self._make_one(name=NAME) - bucket.storage_class = klass.MULTI_REGIONAL_LEGACY_STORAGE_CLASS - self.assertEqual( - bucket.storage_class, klass.MULTI_REGIONAL_LEGACY_STORAGE_CLASS - ) + bucket.storage_class = MULTI_REGIONAL_LEGACY_STORAGE_CLASS + self.assertEqual(bucket.storage_class, MULTI_REGIONAL_LEGACY_STORAGE_CLASS) self.assertTrue("storageClass" in bucket._changes) def test_storage_class_setter_REGIONAL(self): - klass = self._get_target_class() + from google.cloud.storage.constants import REGIONAL_LEGACY_STORAGE_CLASS + NAME = "name" bucket = self._make_one(name=NAME) - bucket.storage_class = klass.REGIONAL_LEGACY_STORAGE_CLASS - self.assertEqual(bucket.storage_class, klass.REGIONAL_LEGACY_STORAGE_CLASS) + bucket.storage_class = REGIONAL_LEGACY_STORAGE_CLASS + self.assertEqual(bucket.storage_class, REGIONAL_LEGACY_STORAGE_CLASS) self.assertTrue("storageClass" in bucket._changes) def test_storage_class_setter_DURABLE_REDUCED_AVAILABILITY(self): - klass = self._get_target_class() + from google.cloud.storage.constants import ( + DURABLE_REDUCED_AVAILABILITY_LEGACY_STORAGE_CLASS, + ) + NAME = "name" bucket = self._make_one(name=NAME) - bucket.storage_class = klass.DURABLE_REDUCED_AVAILABILITY_LEGACY_STORAGE_CLASS + bucket.storage_class = DURABLE_REDUCED_AVAILABILITY_LEGACY_STORAGE_CLASS self.assertEqual( - bucket.storage_class, - klass.DURABLE_REDUCED_AVAILABILITY_LEGACY_STORAGE_CLASS, + bucket.storage_class, DURABLE_REDUCED_AVAILABILITY_LEGACY_STORAGE_CLASS ) self.assertTrue("storageClass" in bucket._changes) diff --git a/storage/tests/unit/test_client.py b/storage/tests/unit/test_client.py index f8f073d887f2..f8a857d164a0 100644 --- a/storage/tests/unit/test_client.py +++ b/storage/tests/unit/test_client.py @@ -954,7 +954,7 @@ def dummy_response(): 
self.assertIsInstance(bucket, Bucket) self.assertEqual(bucket.name, blob_name) - def _create_hmac_key_helper(self, explicit_project=None): + def _create_hmac_key_helper(self, explicit_project=None, user_project=None): import datetime from pytz import UTC from six.moves.urllib.parse import urlencode @@ -996,6 +996,9 @@ def _create_hmac_key_helper(self, explicit_project=None): if explicit_project is not None: kwargs["project_id"] = explicit_project + if user_project is not None: + kwargs["user_project"] = user_project + metadata, secret = client.create_hmac_key(service_account_email=EMAIL, **kwargs) self.assertIsInstance(metadata, HMACKeyMetadata) @@ -1013,8 +1016,12 @@ def _create_hmac_key_helper(self, explicit_project=None): "hmacKeys", ] ) - QS_PARAMS = {"serviceAccountEmail": EMAIL} - FULL_URI = "{}?{}".format(URI, urlencode(QS_PARAMS)) + qs_params = {"serviceAccountEmail": EMAIL} + + if user_project is not None: + qs_params["userProject"] = user_project + + FULL_URI = "{}?{}".format(URI, urlencode(qs_params)) http.request.assert_called_once_with( method="POST", url=FULL_URI, data=None, headers=mock.ANY ) @@ -1025,6 +1032,9 @@ def test_create_hmac_key_defaults(self): def test_create_hmac_key_explicit_project(self): self._create_hmac_key_helper(explicit_project="other-project-456") + def test_create_hmac_key_user_project(self): + self._create_hmac_key_helper(user_project="billed-project") + def test_list_hmac_keys_defaults_empty(self): PROJECT = "PROJECT" CREDENTIALS = _make_credentials() @@ -1060,6 +1070,7 @@ def test_list_hmac_keys_explicit_non_empty(self): MAX_RESULTS = 3 EMAIL = "storage-user-123@example.com" ACCESS_ID = "ACCESS-ID" + USER_PROJECT = "billed-project" CREDENTIALS = _make_credentials() client = self._make_one(project=PROJECT, credentials=CREDENTIALS) @@ -1083,6 +1094,7 @@ def test_list_hmac_keys_explicit_non_empty(self): service_account_email=EMAIL, show_deleted_keys=True, project_id=OTHER_PROJECT, + user_project=USER_PROJECT, ) ) @@ -1107,6 
+1119,7 @@ def test_list_hmac_keys_explicit_non_empty(self): "maxResults": str(MAX_RESULTS), "serviceAccountEmail": EMAIL, "showDeletedKeys": "True", + "userProject": USER_PROJECT, } http.request.assert_called_once_with( method="GET", url=mock.ANY, data=None, headers=mock.ANY @@ -1160,12 +1173,14 @@ def test_get_hmac_key_metadata_wo_project(self): ) def test_get_hmac_key_metadata_w_project(self): + from six.moves.urllib.parse import urlencode from google.cloud.storage.hmac_key import HMACKeyMetadata PROJECT = "PROJECT" OTHER_PROJECT = "other-project-456" EMAIL = "storage-user-123@example.com" ACCESS_ID = "ACCESS-ID" + USER_PROJECT = "billed-project" CREDENTIALS = _make_credentials() client = self._make_one(project=PROJECT, credentials=CREDENTIALS) @@ -1179,7 +1194,9 @@ def test_get_hmac_key_metadata_w_project(self): http = _make_requests_session([_make_json_response(resource)]) client._http_internal = http - metadata = client.get_hmac_key_metadata(ACCESS_ID, project_id=OTHER_PROJECT) + metadata = client.get_hmac_key_metadata( + ACCESS_ID, project_id=OTHER_PROJECT, user_project=USER_PROJECT + ) self.assertIsInstance(metadata, HMACKeyMetadata) self.assertIs(metadata._client, client) @@ -1197,6 +1214,10 @@ def test_get_hmac_key_metadata_w_project(self): ACCESS_ID, ] ) + + qs_params = {"userProject": USER_PROJECT} + FULL_URI = "{}?{}".format(URI, urlencode(qs_params)) + http.request.assert_called_once_with( - method="GET", url=URI, data=None, headers=mock.ANY + method="GET", url=FULL_URI, data=None, headers=mock.ANY ) diff --git a/storage/tests/unit/test_hmac_key.py b/storage/tests/unit/test_hmac_key.py index 399a82682a64..138742d5b672 100644 --- a/storage/tests/unit/test_hmac_key.py +++ b/storage/tests/unit/test_hmac_key.py @@ -46,12 +46,19 @@ def test_ctor_defaults(self): def test_ctor_explicit(self): OTHER_PROJECT = "other-project-456" ACCESS_ID = "access-id-123456789" + USER_PROJECT = "billed-project" client = _Client() - metadata = self._make_one(client, 
access_id=ACCESS_ID, project_id=OTHER_PROJECT) + metadata = self._make_one( + client, + access_id=ACCESS_ID, + project_id=OTHER_PROJECT, + user_project=USER_PROJECT, + ) self.assertIs(metadata._client, client) expected = {"accessId": ACCESS_ID, "projectId": OTHER_PROJECT} self.assertEqual(metadata._properties, expected) self.assertEqual(metadata.access_id, ACCESS_ID) + self.assertEqual(metadata.user_project, USER_PROJECT) self.assertIsNone(metadata.etag) self.assertIsNone(metadata.id) self.assertEqual(metadata.project, OTHER_PROJECT) @@ -217,12 +224,13 @@ def test_exists_miss_no_project_set(self): expected_path = "/projects/{}/hmacKeys/{}".format( client.DEFAULT_PROJECT, access_id ) - expected_kwargs = {"method": "GET", "path": expected_path} + expected_kwargs = {"method": "GET", "path": expected_path, "query_params": {}} connection.api_request.assert_called_once_with(**expected_kwargs) def test_exists_hit_w_project_set(self): project = "PROJECT-ID" access_id = "ACCESS-ID" + user_project = "billed-project" email = "service-account@example.com" resource = { "kind": "storage#hmacKeyMetadata", @@ -232,14 +240,18 @@ def test_exists_hit_w_project_set(self): connection = mock.Mock(spec=["api_request"]) connection.api_request.return_value = resource client = _Client(connection) - metadata = self._make_one(client) + metadata = self._make_one(client, user_project=user_project) metadata._properties["accessId"] = access_id metadata._properties["projectId"] = project self.assertTrue(metadata.exists()) expected_path = "/projects/{}/hmacKeys/{}".format(project, access_id) - expected_kwargs = {"method": "GET", "path": expected_path} + expected_kwargs = { + "method": "GET", + "path": expected_path, + "query_params": {"userProject": user_project}, + } connection.api_request.assert_called_once_with(**expected_kwargs) def test_reload_miss_no_project_set(self): @@ -258,12 +270,13 @@ def test_reload_miss_no_project_set(self): expected_path = "/projects/{}/hmacKeys/{}".format( 
client.DEFAULT_PROJECT, access_id ) - expected_kwargs = {"method": "GET", "path": expected_path} + expected_kwargs = {"method": "GET", "path": expected_path, "query_params": {}} connection.api_request.assert_called_once_with(**expected_kwargs) def test_reload_hit_w_project_set(self): project = "PROJECT-ID" access_id = "ACCESS-ID" + user_project = "billed-project" email = "service-account@example.com" resource = { "kind": "storage#hmacKeyMetadata", @@ -273,7 +286,7 @@ def test_reload_hit_w_project_set(self): connection = mock.Mock(spec=["api_request"]) connection.api_request.return_value = resource client = _Client(connection) - metadata = self._make_one(client) + metadata = self._make_one(client, user_project=user_project) metadata._properties["accessId"] = access_id metadata._properties["projectId"] = project @@ -282,7 +295,11 @@ def test_reload_hit_w_project_set(self): self.assertEqual(metadata._properties, resource) expected_path = "/projects/{}/hmacKeys/{}".format(project, access_id) - expected_kwargs = {"method": "GET", "path": expected_path} + expected_kwargs = { + "method": "GET", + "path": expected_path, + "query_params": {"userProject": user_project}, + } connection.api_request.assert_called_once_with(**expected_kwargs) def test_update_miss_no_project_set(self): @@ -306,12 +323,14 @@ def test_update_miss_no_project_set(self): "method": "PUT", "path": expected_path, "data": {"state": "INACTIVE"}, + "query_params": {}, } connection.api_request.assert_called_once_with(**expected_kwargs) def test_update_hit_w_project_set(self): project = "PROJECT-ID" access_id = "ACCESS-ID" + user_project = "billed-project" email = "service-account@example.com" resource = { "kind": "storage#hmacKeyMetadata", @@ -322,7 +341,7 @@ def test_update_hit_w_project_set(self): connection = mock.Mock(spec=["api_request"]) connection.api_request.return_value = resource client = _Client(connection) - metadata = self._make_one(client) + metadata = self._make_one(client, 
user_project=user_project) metadata._properties["accessId"] = access_id metadata._properties["projectId"] = project metadata.state = "ACTIVE" @@ -336,6 +355,7 @@ def test_update_hit_w_project_set(self): "method": "PUT", "path": expected_path, "data": {"state": "ACTIVE"}, + "query_params": {"userProject": user_project}, } connection.api_request.assert_called_once_with(**expected_kwargs) @@ -364,16 +384,21 @@ def test_delete_miss_no_project_set(self): expected_path = "/projects/{}/hmacKeys/{}".format( client.DEFAULT_PROJECT, access_id ) - expected_kwargs = {"method": "DELETE", "path": expected_path} + expected_kwargs = { + "method": "DELETE", + "path": expected_path, + "query_params": {}, + } connection.api_request.assert_called_once_with(**expected_kwargs) def test_delete_hit_w_project_set(self): project = "PROJECT-ID" access_id = "ACCESS-ID" + user_project = "billed-project" connection = mock.Mock(spec=["api_request"]) connection.api_request.return_value = {} client = _Client(connection) - metadata = self._make_one(client) + metadata = self._make_one(client, user_project=user_project) metadata._properties["accessId"] = access_id metadata._properties["projectId"] = project metadata.state = "INACTIVE" @@ -381,7 +406,11 @@ def test_delete_hit_w_project_set(self): metadata.delete() expected_path = "/projects/{}/hmacKeys/{}".format(project, access_id) - expected_kwargs = {"method": "DELETE", "path": expected_path} + expected_kwargs = { + "method": "DELETE", + "path": expected_path, + "query_params": {"userProject": user_project}, + } connection.api_request.assert_called_once_with(**expected_kwargs) diff --git a/talent/CHANGELOG.md b/talent/CHANGELOG.md index 2a1ccc914d29..411e49618df9 100644 --- a/talent/CHANGELOG.md +++ b/talent/CHANGELOG.md @@ -4,6 +4,31 @@ [1]: https://pypi.org/project/google-cloud-talent/#history +## 0.4.0 + +10-04-2019 14:29 PDT + +### Implementation Changes +- Move `BatchOperationMetadata` / `JobOperationResult` messages to new protobuf files (via 
synth). ([#9129](https://github.com/googleapis/google-cloud-python/pull/9129)) +- Import batch proto (via synth). ([#9062](https://github.com/googleapis/google-cloud-python/pull/9062)) +- Remove send / receive message size limit (via synth). ([#8970](https://github.com/googleapis/google-cloud-python/pull/8970)) + +### New Features +- Deprecate `candidate_availability_filter` for `availability_filters`, add `AvailabilitySignalType`, add fields to `update_profile` (via synth). ([#9256](https://github.com/googleapis/google-cloud-python/pull/9256)) +- Add `applications` / `assignments` fields to `Profile` message (via synth). ([#9229](https://github.com/googleapis/google-cloud-python/pull/9229)) +- Add `filter_` arg to `ProfileServiceClient.list_profiles`; docstring updates (via synth). ([#9223](https://github.com/googleapis/google-cloud-python/pull/9223)) +- Deprecate job visibility (via synth). ([#9050](https://github.com/googleapis/google-cloud-python/pull/9050)) +- Document additional fields allowed in profile update mask (via synth). ([#9000](https://github.com/googleapis/google-cloud-python/pull/9000)) + +### Documentation +- Remove compatability badges from READMEs. ([#9035](https://github.com/googleapis/google-cloud-python/pull/9035)) +- Update docstrings (via synth). ([#8986](https://github.com/googleapis/google-cloud-python/pull/8986)) + +### Internal / Testing Changes +- Fix intersphinx reference to requests. ([#9294](https://github.com/googleapis/google-cloud-python/pull/9294)) +- Remove CI for gh-pages, use googleapis.dev for api_core refs. ([#9085](https://github.com/googleapis/google-cloud-python/pull/9085)) +- Update intersphinx mapping for requests. 
([#8805](https://github.com/googleapis/google-cloud-python/pull/8805)) + ## 0.3.0 07-24-2019 17:36 PDT diff --git a/talent/docs/conf.py b/talent/docs/conf.py index 9b26b1fe0f7f..8bcbde945032 100644 --- a/talent/docs/conf.py +++ b/talent/docs/conf.py @@ -338,7 +338,7 @@ "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://2.python-requests.org/en/master/", None), + "requests": ("https://requests.kennethreitz.org/en/stable/", None), "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } diff --git a/talent/google/cloud/talent_v4beta1/gapic/application_service_client.py b/talent/google/cloud/talent_v4beta1/gapic/application_service_client.py index f73835ce7795..7bfc6ccc9e4d 100644 --- a/talent/google/cloud/talent_v4beta1/gapic/application_service_client.py +++ b/talent/google/cloud/talent_v4beta1/gapic/application_service_client.py @@ -241,9 +241,8 @@ def create_application( created. The format is - "projects/{project\_id}/tenants/{tenant\_id}/profiles/{profile\_id}", - for example, - "projects/test-project/tenants/test-tenant/profiles/test-profile". + "projects/{project\_id}/tenants/{tenant\_id}/profiles/{profile\_id}". + For example, "projects/foo/tenants/bar/profiles/baz". application (Union[dict, ~google.cloud.talent_v4beta1.types.Application]): Required. The application to be created. If a dict is provided, it must be of the same form as the protobuf @@ -321,9 +320,8 @@ def get_application( name (str): Required. The resource name of the application to be retrieved. The format is - "projects/{project\_id}/tenants/{tenant\_id}/profiles/{profile\_id}/applications/{application\_id}", - for example, - "projects/test-project/tenants/test-tenant/profiles/test-profile/applications/test-application". 
+ "projects/{project\_id}/tenants/{tenant\_id}/profiles/{profile\_id}/applications/{application\_id}". + For example, "projects/foo/tenants/bar/profiles/baz/applications/qux". retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. @@ -399,7 +397,7 @@ def update_application( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.talent_v4beta1.types.Application` - update_mask (Union[dict, ~google.cloud.talent_v4beta1.types.FieldMask]): Optional but strongly recommended for the best service experience. + update_mask (Union[dict, ~google.cloud.talent_v4beta1.types.FieldMask]): Strongly recommended for the best service experience. If ``update_mask`` is provided, only the specified fields in ``application`` are updated. Otherwise all the fields are updated. @@ -482,9 +480,8 @@ def delete_application( name (str): Required. The resource name of the application to be deleted. The format is - "projects/{project\_id}/tenants/{tenant\_id}/profiles/{profile\_id}/applications/{application\_id}", - for example, - "projects/test-project/tenants/test-tenant/profiles/test-profile/applications/test-application". + "projects/{project\_id}/tenants/{tenant\_id}/profiles/{profile\_id}/applications/{application\_id}". + For example, "projects/foo/tenants/bar/profiles/baz/applications/qux". retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. @@ -568,8 +565,7 @@ def list_applications( The format is "projects/{project\_id}/tenants/{tenant\_id}/profiles/{profile\_id}", - for example, - "projects/test-project/tenants/test-tenant/profiles/test-profile". + for example, "projects/foo/tenants/bar/profiles/baz". page_size (int): The maximum number of resources contained in the underlying API response. 
If page streaming is performed per- resource, this parameter does not affect the return value. If page diff --git a/talent/google/cloud/talent_v4beta1/gapic/company_service_client.py b/talent/google/cloud/talent_v4beta1/gapic/company_service_client.py index e2c101363ea0..64e861b4eae6 100644 --- a/talent/google/cloud/talent_v4beta1/gapic/company_service_client.py +++ b/talent/google/cloud/talent_v4beta1/gapic/company_service_client.py @@ -249,10 +249,8 @@ def create_company( created. The format is "projects/{project\_id}/tenants/{tenant\_id}", for - example, "projects/api-test-project/tenant/foo". - - Tenant id is optional and a default tenant is created if unspecified, - for example, "projects/api-test-project". + example, "projects/foo/tenant/bar". If tenant id is unspecified, a + default tenant is created, for example, "projects/foo". company (Union[dict, ~google.cloud.talent_v4beta1.types.Company]): Required. The company to be created. If a dict is provided, it must be of the same form as the protobuf @@ -333,8 +331,8 @@ def get_company( "projects/{project\_id}/tenants/{tenant\_id}/companies/{company\_id}", for example, "projects/api-test-project/tenants/foo/companies/bar". - Tenant id is optional and the default tenant is used if unspecified, for - example, "projects/api-test-project/companies/bar". + If tenant id is unspecified, the default tenant is used, for example, + "projects/api-test-project/companies/bar". retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. @@ -410,7 +408,7 @@ def update_company( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.talent_v4beta1.types.Company` - update_mask (Union[dict, ~google.cloud.talent_v4beta1.types.FieldMask]): Optional but strongly recommended for the best service experience. 
+ update_mask (Union[dict, ~google.cloud.talent_v4beta1.types.FieldMask]): Strongly recommended for the best service experience. If ``update_mask`` is provided, only the specified fields in ``company`` are updated. Otherwise all the fields are updated. @@ -495,10 +493,10 @@ def delete_company( The format is "projects/{project\_id}/tenants/{tenant\_id}/companies/{company\_id}", - for example, "projects/api-test-project/tenants/foo/companies/bar". + for example, "projects/foo/tenants/bar/companies/baz". - Tenant id is optional and the default tenant is used if unspecified, for - example, "projects/api-test-project/companies/bar". + If tenant id is unspecified, the default tenant is used, for example, + "projects/foo/companies/bar". retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. @@ -582,16 +580,16 @@ def list_companies( created. The format is "projects/{project\_id}/tenants/{tenant\_id}", for - example, "projects/api-test-project/tenant/foo". + example, "projects/foo/tenant/bar". - Tenant id is optional and the default tenant is used if unspecified, for - example, "projects/api-test-project". + If tenant id is unspecified, the default tenant will be used, for + example, "projects/foo". page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per- resource, this parameter does not affect the return value. If page streaming is performed per-page, this determines the maximum number of resources in a page. - require_open_jobs (bool): Optional. Set to true if the companies requested must have open jobs. + require_open_jobs (bool): Set to true if the companies requested must have open jobs. Defaults to false. 
diff --git a/talent/google/cloud/talent_v4beta1/gapic/completion_client.py b/talent/google/cloud/talent_v4beta1/gapic/completion_client.py index d33123d98c98..6d5331a62a79 100644 --- a/talent/google/cloud/talent_v4beta1/gapic/completion_client.py +++ b/talent/google/cloud/talent_v4beta1/gapic/completion_client.py @@ -257,19 +257,19 @@ def complete_query( parent (str): Required. Resource name of tenant the completion is performed within. The format is "projects/{project\_id}/tenants/{tenant\_id}", for - example, "projects/api-test-project/tenant/foo". + example, "projects/foo/tenant/bar". - Tenant id is optional and the default tenant is used if unspecified, for - example, "projects/api-test-project". + If tenant id is unspecified, the default tenant is used, for example, + "projects/foo". query (str): Required. The query used to generate suggestions. The maximum number of allowed characters is 255. page_size (int): Required. Completion result count. The maximum allowed page size is 10. - language_codes (list[str]): Optional. The list of languages of the query. This is the BCP-47 - language code, such as "en-US" or "sr-Latn". For more information, see - `Tags for Identifying Languages `__. + language_codes (list[str]): The list of languages of the query. This is the BCP-47 language code, + such as "en-US" or "sr-Latn". For more information, see `Tags for + Identifying Languages `__. For ``CompletionType.JOB_TITLE`` type, only open jobs with the same ``language_codes`` are returned. @@ -282,18 +282,16 @@ def complete_query( ``language_codes`` are returned. The maximum number of allowed characters is 255. - company (str): Optional. If provided, restricts completion to specified company. + company (str): If provided, restricts completion to specified company. The format is "projects/{project\_id}/tenants/{tenant\_id}/companies/{company\_id}", - for example, "projects/api-test-project/tenants/foo/companies/bar". 
- - Tenant id is optional and the default tenant is used if unspecified, for - example, "projects/api-test-project/companies/bar". - scope (~google.cloud.talent_v4beta1.types.CompletionScope): Optional. The scope of the completion. The defaults is - ``CompletionScope.PUBLIC``. - type_ (~google.cloud.talent_v4beta1.types.CompletionType): Optional. The completion topic. The default is - ``CompletionType.COMBINED``. + for example, "projects/foo/tenants/bar/companies/baz". + + If tenant id is unspecified, the default tenant is used, for example, + "projects/foo". + scope (~google.cloud.talent_v4beta1.types.CompletionScope): The scope of the completion. The defaults is ``CompletionScope.PUBLIC``. + type_ (~google.cloud.talent_v4beta1.types.CompletionType): The completion topic. The default is ``CompletionType.COMBINED``. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. diff --git a/talent/google/cloud/talent_v4beta1/gapic/enums.py b/talent/google/cloud/talent_v4beta1/gapic/enums.py index 0769f3ed8426..b9293a53a725 100644 --- a/talent/google/cloud/talent_v4beta1/gapic/enums.py +++ b/talent/google/cloud/talent_v4beta1/gapic/enums.py @@ -19,6 +19,64 @@ import enum +class AvailabilitySignalType(enum.IntEnum): + """ + The type of candidate availability signal. + + Attributes: + AVAILABILITY_SIGNAL_TYPE_UNSPECIFIED (int): Default value. + JOB_APPLICATION (int): Job application signal. + + In the context of ``Profile.availability_signals``, this signal is + related to the candidate's most recent application. 
``last_update_time`` + is calculated from max(\ ``Application.create_time``) from all + ``Application`` records where ``Application.source`` is any of the + following: ``APPLY_DIRECT_WEB`` ``APPLY_DIRECT_MOBILE_WEB`` + ``APPLY_DIRECT_MOBILE_APP`` ``APPLY_DIRECT_IN_PERSON`` + ``APPLY_INDIRECT`` + + In the context of ``AvailabilityFilter``, the filter is applied on + ``Profile.availability_signals`` where ``type`` is JOB\_APPLICATION. + RESUME_UPDATE (int): Resume update signal. + + In the context of ``Profile.availability_signals``, this signal is + related to the candidate’s most recent update to their resume. For a + ``SummarizedProfile.summary``, ``last_update_time`` is calculated from + max(\ ``Profile.resume_update_time``) from all + ``SummarizedProfile.profiles``. + + In the context of ``AvailabilityFilter``, the filter is applied on + ``Profile.availability_signals`` where ``type`` is RESUME\_UPDATE. + CANDIDATE_UPDATE (int): Candidate update signal. + + In the context of ``Profile.availability_signals``, this signal is + related to the candidate’s most recent update to their profile. For a + ``SummarizedProfile.summary``, ``last_update_time`` is calculated from + max(\ ``Profile.candidate_update_time``) from all + ``SummarizedProfile.profiles``. + + In the context of ``AvailabilityFilter``, the filter is applied on + ``Profile.availability_signals`` where ``type`` is CANDIDATE\_UPDATE. + CLIENT_SUBMISSION (int): Client submission signal. + + In the context of ``Profile.availability_signals``, this signal is + related to the candidate’s most recent submission. ``last_update_time`` + is calculated from max(\ ``Application.create_time``) from all + ``Application`` records where ``Application.stage`` is any of the + following: ``HIRING_MANAGER_REVIEW`` ``INTERVIEW`` ``OFFER_EXTENDED`` + ``OFFER_ACCEPTED`` ``STARTED`` + + In the context of ``AvailabilityFilter``, the filter is applied on + ``Profile.availability_signals`` where ``type`` is CLIENT\_SUBMISSION. 
+ """ + + AVAILABILITY_SIGNAL_TYPE_UNSPECIFIED = 0 + JOB_APPLICATION = 1 + RESUME_UPDATE = 2 + CANDIDATE_UPDATE = 3 + CLIENT_SUBMISSION = 4 + + class CommuteMethod(enum.IntEnum): """ Method for commute. @@ -179,8 +237,6 @@ class EmploymentType(enum.IntEnum): class HtmlSanitization(enum.IntEnum): """ - Input only. - Option for HTML content sanitization on user input fields, for example, job description. By setting this option, user can determine whether and how sanitization is performed on these fields. @@ -999,7 +1055,8 @@ class SearchMode(enum.IntEnum): operate differently for different modes of service. Attributes: - SEARCH_MODE_UNSPECIFIED (int): The mode of the search method isn't specified. + SEARCH_MODE_UNSPECIFIED (int): The mode of the search method isn't specified. The default search + behavior is identical to JOB\_SEARCH search behavior. JOB_SEARCH (int): The job search matches against all jobs, and featured jobs (jobs with promotionValue > 0) are not specially handled. FEATURED_JOB_SEARCH (int): The job search matches only against featured jobs (jobs with a diff --git a/talent/google/cloud/talent_v4beta1/gapic/event_service_client.py b/talent/google/cloud/talent_v4beta1/gapic/event_service_client.py index 3acdec7d3aae..3db94322a58b 100644 --- a/talent/google/cloud/talent_v4beta1/gapic/event_service_client.py +++ b/talent/google/cloud/talent_v4beta1/gapic/event_service_client.py @@ -240,10 +240,8 @@ def create_client_event( parent (str): Required. Resource name of the tenant under which the event is created. The format is "projects/{project\_id}/tenants/{tenant\_id}", for - example, "projects/api-test-project/tenant/foo". - - Tenant id is optional and a default tenant is created if unspecified, - for example, "projects/api-test-project". + example, "projects/foo/tenant/bar". If tenant id is unspecified, a + default tenant is created, for example, "projects/foo". client_event (Union[dict, ~google.cloud.talent_v4beta1.types.ClientEvent]): Required. 
Events issued when end user interacts with customer's application that uses Cloud Talent Solution. diff --git a/talent/google/cloud/talent_v4beta1/gapic/job_service_client.py b/talent/google/cloud/talent_v4beta1/gapic/job_service_client.py index ae4832568656..98017d367c7f 100644 --- a/talent/google/cloud/talent_v4beta1/gapic/job_service_client.py +++ b/talent/google/cloud/talent_v4beta1/gapic/job_service_client.py @@ -282,11 +282,9 @@ def create_job( parent (str): Required. The resource name of the tenant under which the job is created. - The format is "projects/{project\_id}/tenants/{tenant\_id}", for - example, "projects/api-test-project/tenant/foo". - - Tenant id is optional and a default tenant is created if unspecified, - for example, "projects/api-test-project". + The format is "projects/{project\_id}/tenants/{tenant\_id}". For + example, "projects/foo/tenant/bar". If tenant id is unspecified a + default tenant is created. For example, "projects/foo". job (Union[dict, ~google.cloud.talent_v4beta1.types.Job]): Required. The Job to be created. If a dict is provided, it must be of the same form as the protobuf @@ -363,11 +361,11 @@ def get_job( name (str): Required. The resource name of the job to retrieve. The format is - "projects/{project\_id}/tenants/{tenant\_id}/jobs/{job\_id}", for - example, "projects/api-test-project/tenants/foo/jobs/1234". + "projects/{project\_id}/tenants/{tenant\_id}/jobs/{job\_id}". For + example, "projects/foo/tenants/bar/jobs/baz". - Tenant id is optional and the default tenant is used if unspecified, for - example, "projects/api-test-project/jobs/1234". + If tenant id is unspecified, the default tenant is used. For example, + "projects/foo/jobs/bar". retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. 
@@ -445,8 +443,7 @@ def update_job( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.talent_v4beta1.types.Job` - update_mask (Union[dict, ~google.cloud.talent_v4beta1.types.FieldMask]): Optional but strongly recommended to be provided for the best service - experience. + update_mask (Union[dict, ~google.cloud.talent_v4beta1.types.FieldMask]): Strongly recommended for the best service experience. If ``update_mask`` is provided, only the specified fields in ``job`` are updated. Otherwise all the fields are updated. @@ -530,11 +527,11 @@ def delete_job( name (str): Required. The resource name of the job to be deleted. The format is - "projects/{project\_id}/tenants/{tenant\_id}/jobs/{job\_id}", for - example, "projects/api-test-project/tenants/foo/jobs/1234". + "projects/{project\_id}/tenants/{tenant\_id}/jobs/{job\_id}". For + example, "projects/foo/tenants/bar/jobs/baz". - Tenant id is optional and the default tenant is used if unspecified, for - example, "projects/api-test-project/jobs/1234". + If tenant id is unspecified, the default tenant is used. For example, + "projects/foo/jobs/bar". retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. @@ -621,11 +618,9 @@ def list_jobs( parent (str): Required. The resource name of the tenant under which the job is created. - The format is "projects/{project\_id}/tenants/{tenant\_id}", for - example, "projects/api-test-project/tenant/foo". - - Tenant id is optional and the default tenant is used if unspecified, for - example, "projects/api-test-project". + The format is "projects/{project\_id}/tenants/{tenant\_id}". For + example, "projects/foo/tenant/bar". If tenant id is unspecified, a + default tenant is created. For example, "projects/foo". filter_ (str): Required. The filter string specifies the jobs to be enumerated. 
Supported operator: =, AND @@ -633,25 +628,24 @@ def list_jobs( The fields eligible for filtering are: - ``companyName`` (Required) - - ``requisitionId`` (Optional) - - ``status`` (Optional) Available values: OPEN, EXPIRED, ALL. Defaults - to OPEN if no value is specified. + - ``requisitionId`` + - ``status`` Available values: OPEN, EXPIRED, ALL. Defaults to OPEN if + no value is specified. Sample Query: - - companyName = "projects/api-test-project/tenants/foo/companies/bar" - - companyName = "projects/api-test-project/tenants/foo/companies/bar" - AND requisitionId = "req-1" - - companyName = "projects/api-test-project/tenants/foo/companies/bar" - AND status = "EXPIRED" + - companyName = "projects/foo/tenants/bar/companies/baz" + - companyName = "projects/foo/tenants/bar/companies/baz" AND + requisitionId = "req-1" + - companyName = "projects/foo/tenants/bar/companies/baz" AND status = + "EXPIRED" page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per- resource, this parameter does not affect the return value. If page streaming is performed per-page, this determines the maximum number of resources in a page. - job_view (~google.cloud.talent_v4beta1.types.JobView): Optional. The desired job attributes returned for jobs in the search - response. Defaults to ``JobView.JOB_VIEW_FULL`` if no value is - specified. + job_view (~google.cloud.talent_v4beta1.types.JobView): The desired job attributes returned for jobs in the search response. + Defaults to ``JobView.JOB_VIEW_FULL`` if no value is specified. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. @@ -743,11 +737,9 @@ def batch_delete_jobs( parent (str): Required. The resource name of the tenant under which the job is created. 
- The format is "projects/{project\_id}/tenants/{tenant\_id}", for - example, "projects/api-test-project/tenant/foo". - - Tenant id is optional and the default tenant is used if unspecified, for - example, "projects/api-test-project". + The format is "projects/{project\_id}/tenants/{tenant\_id}". For + example, "projects/foo/tenant/bar". If tenant id is unspecified, a + default tenant is created. For example, "projects/foo". filter_ (str): Required. The filter string specifies the jobs to be deleted. Supported operator: =, AND @@ -757,8 +749,8 @@ def batch_delete_jobs( - ``companyName`` (Required) - ``requisitionId`` (Required) - Sample Query: companyName = "projects/api-test-project/companies/123" - AND requisitionId = "req-1" + Sample Query: companyName = "projects/foo/companies/bar" AND + requisitionId = "req-1" retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. @@ -857,32 +849,29 @@ def search_jobs( Args: parent (str): Required. The resource name of the tenant to search within. - The format is "projects/{project\_id}/tenants/{tenant\_id}", for - example, "projects/api-test-project/tenant/foo". - - Tenant id is optional and the default tenant is used if unspecified, for - example, "projects/api-test-project". + The format is "projects/{project\_id}/tenants/{tenant\_id}". For + example, "projects/foo/tenant/bar". If tenant id is unspecified, a + default tenant is created. For example, "projects/foo". request_metadata (Union[dict, ~google.cloud.talent_v4beta1.types.RequestMetadata]): Required. The meta information collected about the job searcher, used to improve the search quality of the service. The identifiers (such as ``user_id``) are provided by users, and must be unique and consistent. 
If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.talent_v4beta1.types.RequestMetadata` - search_mode (~google.cloud.talent_v4beta1.types.SearchMode): Optional. Mode of a search. + search_mode (~google.cloud.talent_v4beta1.types.SearchMode): Mode of a search. Defaults to ``SearchMode.JOB_SEARCH``. - job_query (Union[dict, ~google.cloud.talent_v4beta1.types.JobQuery]): Optional. Query used to search against jobs, such as keyword, location - filters, etc. + job_query (Union[dict, ~google.cloud.talent_v4beta1.types.JobQuery]): Query used to search against jobs, such as keyword, location filters, etc. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.talent_v4beta1.types.JobQuery` - enable_broadening (bool): Optional. Controls whether to broaden the search when it produces sparse - results. Broadened queries append results to the end of the matching - results list. + enable_broadening (bool): Controls whether to broaden the search when it produces sparse results. + Broadened queries append results to the end of the matching results + list. Defaults to false. - require_precise_result_size (bool): Optional. Controls if the search job request requires the return of a - precise count of the first 300 results. Setting this to ``true`` ensures + require_precise_result_size (bool): Controls if the search job request requires the return of a precise + count of the first 300 results. Setting this to ``true`` ensures consistency in the number of results per page. Best practice is to set this value to true if a client allows users to jump directly to a non-sequential search results page. @@ -890,8 +879,7 @@ def search_jobs( Enabling this flag may adversely impact performance. Defaults to false. - histogram_queries (list[Union[dict, ~google.cloud.talent_v4beta1.types.HistogramQuery]]): Optional. An expression specifies a histogram request against matching - jobs. 
+ histogram_queries (list[Union[dict, ~google.cloud.talent_v4beta1.types.HistogramQuery]]): An expression specifies a histogram request against matching jobs. Expression syntax is an aggregation function call with histogram facets and other options. @@ -985,13 +973,11 @@ def search_jobs( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.talent_v4beta1.types.HistogramQuery` - job_view (~google.cloud.talent_v4beta1.types.JobView): Optional. The desired job attributes returned for jobs in the search - response. Defaults to ``JobView.JOB_VIEW_SMALL`` if no value is - specified. - offset (int): Optional. An integer that specifies the current offset (that is, - starting result location, amongst the jobs deemed by the API as - relevant) in search results. This field is only considered if - ``page_token`` is unset. + job_view (~google.cloud.talent_v4beta1.types.JobView): The desired job attributes returned for jobs in the search response. + Defaults to ``JobView.JOB_VIEW_SMALL`` if no value is specified. + offset (int): An integer that specifies the current offset (that is, starting result + location, amongst the jobs deemed by the API as relevant) in search + results. This field is only considered if ``page_token`` is unset. For example, 0 means to return results starting from the first matching job, and 10 means to return from the 11th job. This can be used for @@ -1002,8 +988,8 @@ def search_jobs( resource, this parameter does not affect the return value. If page streaming is performed per-page, this determines the maximum number of resources in a page. - order_by (str): Optional. The criteria determining how search results are sorted. - Default is ``"relevance desc"``. + order_by (str): The criteria determining how search results are sorted. Default is + ``"relevance desc"``. Supported options are: @@ -1052,28 +1038,27 @@ def search_jobs( will be ranked at the bottom. 
Distance is calculated with a precision of 11.3 meters (37.4 feet). Diversification strategy is still applied unless explicitly disabled in ``diversification_level``. - diversification_level (~google.cloud.talent_v4beta1.types.DiversificationLevel): Optional. Controls whether highly similar jobs are returned next to each - other in the search results. Jobs are identified as highly similar based - on their titles, job categories, and locations. Highly similar results - are clustered so that only one representative job of the cluster is + diversification_level (~google.cloud.talent_v4beta1.types.DiversificationLevel): Controls whether highly similar jobs are returned next to each other in + the search results. Jobs are identified as highly similar based on their + titles, job categories, and locations. Highly similar results are + clustered so that only one representative job of the cluster is displayed to the job seeker higher up in the results, with the other jobs being displayed lower down in the results. Defaults to ``DiversificationLevel.SIMPLE`` if no value is specified. - custom_ranking_info (Union[dict, ~google.cloud.talent_v4beta1.types.CustomRankingInfo]): Optional. Controls over how job documents get ranked on top of existing - relevance score (determined by API algorithm). + custom_ranking_info (Union[dict, ~google.cloud.talent_v4beta1.types.CustomRankingInfo]): Controls over how job documents get ranked on top of existing relevance + score (determined by API algorithm). If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.talent_v4beta1.types.CustomRankingInfo` - disable_keyword_match (bool): Optional. Controls whether to disable exact keyword match on - ``Job.title``, ``Job.description``, ``Job.company_display_name``, - ``Job.addresses``, ``Job.qualifications``. When disable keyword match is - turned off, a keyword match returns jobs that do not match given - category filters when there are matching keywords. 
For example, for the - query "program manager," a result is returned even if the job posting - has the title "software developer," which doesn't fall into "program - manager" ontology, but does have "program manager" appearing in its - description. + disable_keyword_match (bool): Controls whether to disable exact keyword match on ``Job.title``, + ``Job.description``, ``Job.company_display_name``, ``Job.addresses``, + ``Job.qualifications``. When disable keyword match is turned off, a + keyword match returns jobs that do not match given category filters when + there are matching keywords. For example, for the query "program + manager," a result is returned even if the job posting has the title + "software developer," which doesn't fall into "program manager" + ontology, but does have "program manager" appearing in its description. For queries like "cloud" that don't contain title or location specific ontology, jobs with "cloud" keyword matches are returned regardless of @@ -1220,32 +1205,29 @@ def search_jobs_for_alert( Args: parent (str): Required. The resource name of the tenant to search within. - The format is "projects/{project\_id}/tenants/{tenant\_id}", for - example, "projects/api-test-project/tenant/foo". - - Tenant id is optional and the default tenant is used if unspecified, for - example, "projects/api-test-project". + The format is "projects/{project\_id}/tenants/{tenant\_id}". For + example, "projects/foo/tenant/bar". If tenant id is unspecified, a + default tenant is created. For example, "projects/foo". request_metadata (Union[dict, ~google.cloud.talent_v4beta1.types.RequestMetadata]): Required. The meta information collected about the job searcher, used to improve the search quality of the service. The identifiers (such as ``user_id``) are provided by users, and must be unique and consistent. 
If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.talent_v4beta1.types.RequestMetadata` - search_mode (~google.cloud.talent_v4beta1.types.SearchMode): Optional. Mode of a search. + search_mode (~google.cloud.talent_v4beta1.types.SearchMode): Mode of a search. Defaults to ``SearchMode.JOB_SEARCH``. - job_query (Union[dict, ~google.cloud.talent_v4beta1.types.JobQuery]): Optional. Query used to search against jobs, such as keyword, location - filters, etc. + job_query (Union[dict, ~google.cloud.talent_v4beta1.types.JobQuery]): Query used to search against jobs, such as keyword, location filters, etc. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.talent_v4beta1.types.JobQuery` - enable_broadening (bool): Optional. Controls whether to broaden the search when it produces sparse - results. Broadened queries append results to the end of the matching - results list. + enable_broadening (bool): Controls whether to broaden the search when it produces sparse results. + Broadened queries append results to the end of the matching results + list. Defaults to false. - require_precise_result_size (bool): Optional. Controls if the search job request requires the return of a - precise count of the first 300 results. Setting this to ``true`` ensures + require_precise_result_size (bool): Controls if the search job request requires the return of a precise + count of the first 300 results. Setting this to ``true`` ensures consistency in the number of results per page. Best practice is to set this value to true if a client allows users to jump directly to a non-sequential search results page. @@ -1253,8 +1235,7 @@ def search_jobs_for_alert( Enabling this flag may adversely impact performance. Defaults to false. - histogram_queries (list[Union[dict, ~google.cloud.talent_v4beta1.types.HistogramQuery]]): Optional. An expression specifies a histogram request against matching - jobs. 
+ histogram_queries (list[Union[dict, ~google.cloud.talent_v4beta1.types.HistogramQuery]]): An expression specifies a histogram request against matching jobs. Expression syntax is an aggregation function call with histogram facets and other options. @@ -1348,13 +1329,11 @@ def search_jobs_for_alert( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.talent_v4beta1.types.HistogramQuery` - job_view (~google.cloud.talent_v4beta1.types.JobView): Optional. The desired job attributes returned for jobs in the search - response. Defaults to ``JobView.JOB_VIEW_SMALL`` if no value is - specified. - offset (int): Optional. An integer that specifies the current offset (that is, - starting result location, amongst the jobs deemed by the API as - relevant) in search results. This field is only considered if - ``page_token`` is unset. + job_view (~google.cloud.talent_v4beta1.types.JobView): The desired job attributes returned for jobs in the search response. + Defaults to ``JobView.JOB_VIEW_SMALL`` if no value is specified. + offset (int): An integer that specifies the current offset (that is, starting result + location, amongst the jobs deemed by the API as relevant) in search + results. This field is only considered if ``page_token`` is unset. For example, 0 means to return results starting from the first matching job, and 10 means to return from the 11th job. This can be used for @@ -1365,8 +1344,8 @@ def search_jobs_for_alert( resource, this parameter does not affect the return value. If page streaming is performed per-page, this determines the maximum number of resources in a page. - order_by (str): Optional. The criteria determining how search results are sorted. - Default is ``"relevance desc"``. + order_by (str): The criteria determining how search results are sorted. Default is + ``"relevance desc"``. Supported options are: @@ -1415,28 +1394,27 @@ def search_jobs_for_alert( will be ranked at the bottom. 
Distance is calculated with a precision of 11.3 meters (37.4 feet). Diversification strategy is still applied unless explicitly disabled in ``diversification_level``. - diversification_level (~google.cloud.talent_v4beta1.types.DiversificationLevel): Optional. Controls whether highly similar jobs are returned next to each - other in the search results. Jobs are identified as highly similar based - on their titles, job categories, and locations. Highly similar results - are clustered so that only one representative job of the cluster is + diversification_level (~google.cloud.talent_v4beta1.types.DiversificationLevel): Controls whether highly similar jobs are returned next to each other in + the search results. Jobs are identified as highly similar based on their + titles, job categories, and locations. Highly similar results are + clustered so that only one representative job of the cluster is displayed to the job seeker higher up in the results, with the other jobs being displayed lower down in the results. Defaults to ``DiversificationLevel.SIMPLE`` if no value is specified. - custom_ranking_info (Union[dict, ~google.cloud.talent_v4beta1.types.CustomRankingInfo]): Optional. Controls over how job documents get ranked on top of existing - relevance score (determined by API algorithm). + custom_ranking_info (Union[dict, ~google.cloud.talent_v4beta1.types.CustomRankingInfo]): Controls over how job documents get ranked on top of existing relevance + score (determined by API algorithm). If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.talent_v4beta1.types.CustomRankingInfo` - disable_keyword_match (bool): Optional. Controls whether to disable exact keyword match on - ``Job.title``, ``Job.description``, ``Job.company_display_name``, - ``Job.addresses``, ``Job.qualifications``. When disable keyword match is - turned off, a keyword match returns jobs that do not match given - category filters when there are matching keywords. 
For example, for the - query "program manager," a result is returned even if the job posting - has the title "software developer," which doesn't fall into "program - manager" ontology, but does have "program manager" appearing in its - description. + disable_keyword_match (bool): Controls whether to disable exact keyword match on ``Job.title``, + ``Job.description``, ``Job.company_display_name``, ``Job.addresses``, + ``Job.qualifications``. When disable keyword match is turned off, a + keyword match returns jobs that do not match given category filters when + there are matching keywords. For example, for the query "program + manager," a result is returned even if the job posting has the title + "software developer," which doesn't fall into "program manager" + ontology, but does have "program manager" appearing in its description. For queries like "cloud" that don't contain title or location specific ontology, jobs with "cloud" keyword matches are returned regardless of @@ -1561,11 +1539,9 @@ def batch_create_jobs( parent (str): Required. The resource name of the tenant under which the job is created. - The format is "projects/{project\_id}/tenants/{tenant\_id}", for - example, "projects/api-test-project/tenant/foo". - - Tenant id is optional and a default tenant is created if unspecified, - for example, "projects/api-test-project". + The format is "projects/{project\_id}/tenants/{tenant\_id}". For + example, "projects/foo/tenant/bar". If tenant id is unspecified, a + default tenant is created. For example, "projects/foo". jobs (list[Union[dict, ~google.cloud.talent_v4beta1.types.Job]]): Required. The jobs to be created. If a dict is provided, it must be of the same form as the protobuf @@ -1661,17 +1637,15 @@ def batch_update_jobs( parent (str): Required. The resource name of the tenant under which the job is created. - The format is "projects/{project\_id}/tenants/{tenant\_id}", for - example, "projects/api-test-project/tenant/foo". 
- - Tenant id is optional and the default tenant is used if unspecified, for - example, "projects/api-test-project". - jobs (list[Union[dict, ~google.cloud.talent_v4beta1.types.Job]]): Required. The jobs to be updated. + The format is "projects/{project\_id}/tenants/{tenant\_id}". For + example, "projects/foo/tenant/bar". If tenant id is unspecified, a + default tenant is created. For example, "projects/foo". + jobs (list[Union[dict, ~google.cloud.talent_v4beta1.types.Job]]): The jobs to be updated. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.talent_v4beta1.types.Job` - update_mask (Union[dict, ~google.cloud.talent_v4beta1.types.FieldMask]): Optional but strongly recommended to be provided for the best service - experience, also increase latency when checking status of batch + update_mask (Union[dict, ~google.cloud.talent_v4beta1.types.FieldMask]): Strongly recommended for the best service experience. Be aware that it + will also increase latency when checking the status of a batch operation. If ``update_mask`` is provided, only the specified fields in ``Job`` are diff --git a/talent/google/cloud/talent_v4beta1/gapic/profile_service_client.py b/talent/google/cloud/talent_v4beta1/gapic/profile_service_client.py index 6df1f438f138..307bc488640c 100644 --- a/talent/google/cloud/talent_v4beta1/gapic/profile_service_client.py +++ b/talent/google/cloud/talent_v4beta1/gapic/profile_service_client.py @@ -228,6 +228,7 @@ def __init__( def list_profiles( self, parent, + filter_=None, page_size=None, read_mask=None, retry=google.api_core.gapic_v1.method.DEFAULT, @@ -262,15 +263,32 @@ def list_profiles( parent (str): Required. The resource name of the tenant under which the profile is created. - The format is "projects/{project\_id}/tenants/{tenant\_id}", for - example, "projects/api-test-project/tenants/foo". + The format is "projects/{project\_id}/tenants/{tenant\_id}". For + example, "projects/foo/tenants/bar". 
+ filter_ (str): The filter string specifies the profiles to be enumerated. + + Supported operator: =, AND + + The field(s) eligible for filtering are: + + - ``externalId`` + - ``groupId`` + + externalId and groupId cannot be specified at the same time. If both + externalId and groupId are provided, the API will return a bad request + error. + + Sample Query: + + - externalId = "externalId-1" + - groupId = "groupId-1" page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per- resource, this parameter does not affect the return value. If page streaming is performed per-page, this determines the maximum number of resources in a page. - read_mask (Union[dict, ~google.cloud.talent_v4beta1.types.FieldMask]): Optional. A field mask to specify the profile fields to be listed in - response. All fields are listed if it is unset. + read_mask (Union[dict, ~google.cloud.talent_v4beta1.types.FieldMask]): A field mask to specify the profile fields to be listed in response. All + fields are listed if it is unset. Valid values are: @@ -312,7 +330,7 @@ def list_profiles( ) request = profile_service_pb2.ListProfilesRequest( - parent=parent, page_size=page_size, read_mask=read_mask + parent=parent, filter=filter_, page_size=page_size, read_mask=read_mask ) if metadata is None: metadata = [] @@ -368,8 +386,8 @@ def create_profile( Args: parent (str): Required. The name of the tenant this profile belongs to. - The format is "projects/{project\_id}/tenants/{tenant\_id}", for - example, "projects/api-test-project/tenants/foo". + The format is "projects/{project\_id}/tenants/{tenant\_id}". For + example, "projects/foo/tenants/bar". profile (Union[dict, ~google.cloud.talent_v4beta1.types.Profile]): Required. The profile to be created. If a dict is provided, it must be of the same form as the protobuf @@ -447,8 +465,8 @@ def get_profile( name (str): Required. Resource name of the profile to get. 
The format is - "projects/{project\_id}/tenants/{tenant\_id}/profiles/{profile\_id}", - for example, "projects/api-test-project/tenants/foo/profiles/bar". + "projects/{project\_id}/tenants/{tenant\_id}/profiles/{profile\_id}". + For example, "projects/foo/tenants/bar/profiles/baz". retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. @@ -523,7 +541,7 @@ def update_profile( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.talent_v4beta1.types.Profile` - update_mask (Union[dict, ~google.cloud.talent_v4beta1.types.FieldMask]): Optional. A field mask to specify the profile fields to update. + update_mask (Union[dict, ~google.cloud.talent_v4beta1.types.FieldMask]): A field mask to specify the profile fields to update. A full update is performed if it is unset. @@ -531,10 +549,13 @@ def update_profile( - external\_id - source + - source\_types - uri - is\_hirable - create\_time - update\_time + - candidate\_update\_time + - resume\_update\_time - resume - person\_names - addresses @@ -650,8 +671,8 @@ def delete_profile( name (str): Required. Resource name of the profile to be deleted. The format is - "projects/{project\_id}/tenants/{tenant\_id}/profiles/{profile\_id}", - for example, "projects/api-test-project/tenants/foo/profiles/bar". + "projects/{project\_id}/tenants/{tenant\_id}/profiles/{profile\_id}". + For example, "projects/foo/tenants/bar/profiles/baz". retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. @@ -750,16 +771,15 @@ def search_profiles( Args: parent (str): Required. The resource name of the tenant to search within. - The format is "projects/{project\_id}/tenants/{tenant\_id}", for - example, "projects/api-test-project/tenants/foo". 
+ The format is "projects/{project\_id}/tenants/{tenant\_id}". For + example, "projects/foo/tenants/bar". request_metadata (Union[dict, ~google.cloud.talent_v4beta1.types.RequestMetadata]): Required. The meta information collected about the profile search user. This is used to improve the search quality of the service. These values are provided by users, and must be precise and consistent. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.talent_v4beta1.types.RequestMetadata` - profile_query (Union[dict, ~google.cloud.talent_v4beta1.types.ProfileQuery]): Optional. Search query to execute. See ``ProfileQuery`` for more - details. + profile_query (Union[dict, ~google.cloud.talent_v4beta1.types.ProfileQuery]): Search query to execute. See ``ProfileQuery`` for more details. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.talent_v4beta1.types.ProfileQuery` @@ -768,9 +788,9 @@ def search_profiles( resource, this parameter does not affect the return value. If page streaming is performed per-page, this determines the maximum number of resources in a page. - offset (int): Optional. An integer that specifies the current offset (that is, - starting result) in search results. This field is only considered if - ``page_token`` is unset. + offset (int): An integer that specifies the current offset (that is, starting result) + in search results. This field is only considered if ``page_token`` is + unset. The maximum allowed value is 5000. Otherwise an error is thrown. @@ -778,12 +798,12 @@ def search_profiles( search from the 11th profile. This can be used for pagination, for example pageSize = 10 and offset = 10 means to search from the second page. - disable_spell_check (bool): Optional. This flag controls the spell-check feature. If ``false``, the - service attempts to correct a misspelled query. + disable_spell_check (bool): This flag controls the spell-check feature. 
If ``false``, the service + attempts to correct a misspelled query. For example, "enginee" is corrected to "engineer". - order_by (str): Optional. The criteria that determines how search results are sorted. - Defaults is "relevance desc" if no value is specified. + order_by (str): The criteria that determines how search results are sorted. Defaults is + "relevance desc" if no value is specified. Supported options are: @@ -801,14 +821,13 @@ def search_profiles( in ascending order. - "last\_name desc": Sort by ``PersonName.PersonStructuredName.family_name`` in ascending order. - case_sensitive_sort (bool): Optional. When sort by field is based on alphabetical order, sort values - case sensitively (based on ASCII) when the value is set to true. Default - value is case in-sensitive sort (false). - histogram_queries (list[Union[dict, ~google.cloud.talent_v4beta1.types.HistogramQuery]]): Optional. A list of expressions specifies histogram requests against - matching profiles for ``SearchProfilesRequest``. + case_sensitive_sort (bool): When sort by field is based on alphabetical order, sort values case + sensitively (based on ASCII) when the value is set to true. Default value + is case in-sensitive sort (false). + histogram_queries (list[Union[dict, ~google.cloud.talent_v4beta1.types.HistogramQuery]]): A list of expressions specifies histogram requests against matching + profiles for ``SearchProfilesRequest``. - The expression syntax looks like a function definition with optional - parameters. + The expression syntax looks like a function definition with parameters. Function syntax: function\_name(histogram\_facet[, list of buckets]) @@ -884,10 +903,9 @@ def search_profiles( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.talent_v4beta1.types.HistogramQuery` - result_set_id (str): Optional. An id that uniquely identifies the result set of a - ``SearchProfiles`` call. 
The id should be retrieved from the - ``SearchProfilesResponse`` message returned from a previous invocation - of ``SearchProfiles``. + result_set_id (str): An id that uniquely identifies the result set of a ``SearchProfiles`` + call. The id should be retrieved from the ``SearchProfilesResponse`` + message returned from a previous invocation of ``SearchProfiles``. A result set is an ordered list of search results. @@ -903,7 +921,7 @@ def search_profiles( A typical use case is to invoke ``SearchProfilesRequest`` without this field, then use the resulting ``result_set_id`` in ``SearchProfilesResponse`` to page through the results. - strict_keywords_search (bool): Optional. This flag is used to indicate whether the service will attempt to + strict_keywords_search (bool): This flag is used to indicate whether the service will attempt to understand synonyms and terms related to the search query or treat the query "as is" when it generates a set of results. By default this flag is set to false, thus allowing expanded results to also be returned. For diff --git a/talent/google/cloud/talent_v4beta1/gapic/tenant_service_client.py b/talent/google/cloud/talent_v4beta1/gapic/tenant_service_client.py index 2ff8a7f962a4..69a4ba42dc2b 100644 --- a/talent/google/cloud/talent_v4beta1/gapic/tenant_service_client.py +++ b/talent/google/cloud/talent_v4beta1/gapic/tenant_service_client.py @@ -249,8 +249,7 @@ def create_tenant( parent (str): Required. Resource name of the project under which the tenant is created. - The format is "projects/{project\_id}", for example, - "projects/api-test-project". + The format is "projects/{project\_id}", for example, "projects/foo". tenant (Union[dict, ~google.cloud.talent_v4beta1.types.Tenant]): Required. The tenant to be created. If a dict is provided, it must be of the same form as the protobuf @@ -326,7 +325,7 @@ def get_tenant( name (str): Required. The resource name of the tenant to be retrieved. 
The format is "projects/{project\_id}/tenants/{tenant\_id}", for - example, "projects/api-test-project/tenants/foo". + example, "projects/foo/tenants/bar". retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. @@ -402,7 +401,7 @@ def update_tenant( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.talent_v4beta1.types.Tenant` - update_mask (Union[dict, ~google.cloud.talent_v4beta1.types.FieldMask]): Optional but strongly recommended for the best service experience. + update_mask (Union[dict, ~google.cloud.talent_v4beta1.types.FieldMask]): Strongly recommended for the best service experience. If ``update_mask`` is provided, only the specified fields in ``tenant`` are updated. Otherwise all the fields are updated. @@ -485,7 +484,7 @@ def delete_tenant( name (str): Required. The resource name of the tenant to be deleted. The format is "projects/{project\_id}/tenants/{tenant\_id}", for - example, "projects/api-test-project/tenants/foo". + example, "projects/foo/tenants/bar". retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. @@ -567,8 +566,7 @@ def list_tenants( parent (str): Required. Resource name of the project under which the tenant is created. - The format is "projects/{project\_id}", for example, - "projects/api-test-project". + The format is "projects/{project\_id}", for example, "projects/foo". page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per- resource, this parameter does not affect the return value. 
If page diff --git a/talent/google/cloud/talent_v4beta1/proto/application.proto b/talent/google/cloud/talent_v4beta1/proto/application.proto index 30ce2fed8d1e..b2c7ce355dae 100644 --- a/talent/google/cloud/talent_v4beta1/proto/application.proto +++ b/talent/google/cloud/talent_v4beta1/proto/application.proto @@ -18,6 +18,7 @@ syntax = "proto3"; package google.cloud.talent.v4beta1; import "google/api/annotations.proto"; +import "google/api/field_behavior.proto"; import "google/cloud/talent/v4beta1/common.proto"; import "google/protobuf/timestamp.proto"; import "google/protobuf/wrappers.proto"; @@ -92,31 +93,30 @@ message Application { // Resource name assigned to an application by the API. // // The format is - // "projects/{project_id}/tenants/{tenant_id}/profiles/{profile_id}/applications/{application_id}", - // for example, - // "projects/api-test-project/tenants/foo/profiles/bar/applications/baz". + // "projects/{project_id}/tenants/{tenant_id}/profiles/{profile_id}/applications/{application_id}". + // For example, "projects/foo/tenants/bar/profiles/baz/applications/qux". string name = 1; // Required. Client side application identifier, used to uniquely identify the // application. // // The maximum number of allowed characters is 255. - string external_id = 31; + string external_id = 31 [(google.api.field_behavior) = REQUIRED]; // Output only. Resource name of the candidate of this application. // // The format is - // "projects/{project_id}/tenants/{tenant_id}/profiles/{profile_id}", - // for example, "projects/api-test-project/tenants/foo/profiles/bar". - string profile = 2; + // "projects/{project_id}/tenants/{tenant_id}/profiles/{profile_id}". + // For example, "projects/foo/tenants/bar/profiles/baz". + string profile = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; // One of either a job or a company is required. // // Resource name of the job which the candidate applied for. 
// // The format is - // "projects/{project_id}/tenants/{tenant_id}/jobs/{job_id}", - // for example, "projects/api-test-project/tenants/foo/jobs/bar". + // "projects/{project_id}/tenants/{tenant_id}/jobs/{job_id}". For example, + // "projects/foo/tenants/bar/jobs/baz". string job = 4; // One of either a job or a company is required. @@ -124,53 +124,54 @@ message Application { // Resource name of the company which the candidate applied for. // // The format is - // "projects/{project_id}/tenants/{tenant_id}/companies/{company_id}", - // for example, "projects/api-test-project/tenants/foo/companies/bar". + // "projects/{project_id}/tenants/{tenant_id}/companies/{company_id}". + // For example, "projects/foo/tenants/bar/companies/baz". string company = 5; - // Optional. The application date. + // The application date. google.type.Date application_date = 7; // Required. What is the most recent stage of the application (that is, new, // screen, send cv, hired, finished work)? This field is intentionally not // comprehensive of every possible status, but instead, represents statuses // that would be used to indicate to the ML models good / bad matches. - ApplicationStage stage = 11; + ApplicationStage stage = 11 [(google.api.field_behavior) = REQUIRED]; - // Optional. The application state. + // The application state. ApplicationState state = 13; - // Optional. All interviews (screen, onsite, and so on) conducted as part of - // this application (includes details such as user conducting the interview, + // All interviews (screen, onsite, and so on) conducted as part of this + // application (includes details such as user conducting the interview, // timestamp, feedback, and so on). repeated Interview interviews = 16; - // Optional. If the candidate is referred by a employee. + // If the candidate is referred by a employee. google.protobuf.BoolValue referral = 18; // Required. Reflects the time that the application was created. 
- google.protobuf.Timestamp create_time = 19; + google.protobuf.Timestamp create_time = 19 + [(google.api.field_behavior) = REQUIRED]; - // Optional. The last update timestamp. + // The last update timestamp. google.protobuf.Timestamp update_time = 20; - // Optional. Free text reason behind the recruitement outcome (for example, - // reason for withdraw / reject, reason for an unsuccessful finish, and so - // on). + // Free text reason behind the recruitement outcome (for example, reason for + // withdraw / reject, reason for an unsuccessful finish, and so on). // // Number of characters allowed is 100. string outcome_notes = 21; - // Optional. Outcome positiveness shows how positive the outcome is. + // Outcome positiveness shows how positive the outcome is. Outcome outcome = 22; // Output only. Indicates whether this job application is a match to // application related filters. This value is only applicable in profile // search response. - google.protobuf.BoolValue is_match = 28; + google.protobuf.BoolValue is_match = 28 + [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Job title snippet shows how the job title is related to a // search query. It's empty if the job title isn't related to the search // query. 
- string job_title_snippet = 29; + string job_title_snippet = 29 [(google.api.field_behavior) = OUTPUT_ONLY]; } diff --git a/talent/google/cloud/talent_v4beta1/proto/application_pb2.py b/talent/google/cloud/talent_v4beta1/proto/application_pb2.py index f41b59fe646b..b2fa82022b09 100644 --- a/talent/google/cloud/talent_v4beta1/proto/application_pb2.py +++ b/talent/google/cloud/talent_v4beta1/proto/application_pb2.py @@ -16,6 +16,7 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.cloud.talent_v4beta1.proto import ( common_pb2 as google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_common__pb2, ) @@ -32,10 +33,11 @@ "\n\037com.google.cloud.talent.v4beta1B\030ApplicationResourceProtoP\001ZAgoogle.golang.org/genproto/googleapis/cloud/talent/v4beta1;talent\242\002\003CTS" ), serialized_pb=_b( - '\n3google/cloud/talent_v4beta1/proto/application.proto\x12\x1bgoogle.cloud.talent.v4beta1\x1a\x1cgoogle/api/annotations.proto\x1a.google/cloud/talent_v4beta1/proto/common.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x16google/type/date.proto"\xc2\x07\n\x0b\x41pplication\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x65xternal_id\x18\x1f \x01(\t\x12\x0f\n\x07profile\x18\x02 \x01(\t\x12\x0b\n\x03job\x18\x04 \x01(\t\x12\x0f\n\x07\x63ompany\x18\x05 \x01(\t\x12+\n\x10\x61pplication_date\x18\x07 \x01(\x0b\x32\x11.google.type.Date\x12H\n\x05stage\x18\x0b \x01(\x0e\x32\x39.google.cloud.talent.v4beta1.Application.ApplicationStage\x12H\n\x05state\x18\r \x01(\x0e\x32\x39.google.cloud.talent.v4beta1.Application.ApplicationState\x12:\n\ninterviews\x18\x10 \x03(\x0b\x32&.google.cloud.talent.v4beta1.Interview\x12,\n\x08referral\x18\x12 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12/\n\x0b\x63reate_time\x18\x13 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x14 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x15\n\routcome_notes\x18\x15 \x01(\t\x12\x35\n\x07outcome\x18\x16 \x01(\x0e\x32$.google.cloud.talent.v4beta1.Outcome\x12,\n\x08is_match\x18\x1c \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x19\n\x11job_title_snippet\x18\x1d \x01(\t"\x90\x01\n\x10\x41pplicationState\x12!\n\x1d\x41PPLICATION_STATE_UNSPECIFIED\x10\x00\x12\x0f\n\x0bIN_PROGRESS\x10\x01\x12\x16\n\x12\x43\x41NDIDATE_WITHDREW\x10\x02\x12\x15\n\x11\x45MPLOYER_WITHDREW\x10\x03\x12\r\n\tCOMPLETED\x10\x04\x12\n\n\x06\x43LOSED\x10\x05"\xa9\x01\n\x10\x41pplicationStage\x12!\n\x1d\x41PPLICATION_STAGE_UNSPECIFIED\x10\x00\x12\x07\n\x03NEW\x10\x01\x12\n\n\x06SCREEN\x10\x02\x12\x19\n\x15HIRING_MANAGER_REVIEW\x10\x03\x12\r\n\tINTERVIEW\x10\x04\x12\x12\n\x0eOFFER_EXTENDED\x10\x05\x12\x12\n\x0eOFFER_ACCEPTED\x10\x06\x12\x0b\n\x07STARTED\x10\x07\x42\x86\x01\n\x1f\x63om.google.cloud.talent.v4beta1B\x18\x41pplicationResourceProtoP\x01ZAgoogle.golang.org/genproto/googleapis/cloud/talent/v4beta1;talent\xa2\x02\x03\x43TSb\x06proto3' + '\n3google/cloud/talent_v4beta1/proto/application.proto\x12\x1bgoogle.cloud.talent.v4beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a.google/cloud/talent_v4beta1/proto/common.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x16google/type/date.proto"\xe0\x07\n\x0b\x41pplication\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x18\n\x0b\x65xternal_id\x18\x1f \x01(\tB\x03\xe0\x41\x02\x12\x14\n\x07profile\x18\x02 \x01(\tB\x03\xe0\x41\x03\x12\x0b\n\x03job\x18\x04 \x01(\t\x12\x0f\n\x07\x63ompany\x18\x05 \x01(\t\x12+\n\x10\x61pplication_date\x18\x07 \x01(\x0b\x32\x11.google.type.Date\x12M\n\x05stage\x18\x0b \x01(\x0e\x32\x39.google.cloud.talent.v4beta1.Application.ApplicationStageB\x03\xe0\x41\x02\x12H\n\x05state\x18\r \x01(\x0e\x32\x39.google.cloud.talent.v4beta1.Application.ApplicationState\x12:\n\ninterviews\x18\x10 
\x03(\x0b\x32&.google.cloud.talent.v4beta1.Interview\x12,\n\x08referral\x18\x12 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x34\n\x0b\x63reate_time\x18\x13 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x02\x12/\n\x0bupdate_time\x18\x14 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x15\n\routcome_notes\x18\x15 \x01(\t\x12\x35\n\x07outcome\x18\x16 \x01(\x0e\x32$.google.cloud.talent.v4beta1.Outcome\x12\x31\n\x08is_match\x18\x1c \x01(\x0b\x32\x1a.google.protobuf.BoolValueB\x03\xe0\x41\x03\x12\x1e\n\x11job_title_snippet\x18\x1d \x01(\tB\x03\xe0\x41\x03"\x90\x01\n\x10\x41pplicationState\x12!\n\x1d\x41PPLICATION_STATE_UNSPECIFIED\x10\x00\x12\x0f\n\x0bIN_PROGRESS\x10\x01\x12\x16\n\x12\x43\x41NDIDATE_WITHDREW\x10\x02\x12\x15\n\x11\x45MPLOYER_WITHDREW\x10\x03\x12\r\n\tCOMPLETED\x10\x04\x12\n\n\x06\x43LOSED\x10\x05"\xa9\x01\n\x10\x41pplicationStage\x12!\n\x1d\x41PPLICATION_STAGE_UNSPECIFIED\x10\x00\x12\x07\n\x03NEW\x10\x01\x12\n\n\x06SCREEN\x10\x02\x12\x19\n\x15HIRING_MANAGER_REVIEW\x10\x03\x12\r\n\tINTERVIEW\x10\x04\x12\x12\n\x0eOFFER_EXTENDED\x10\x05\x12\x12\n\x0eOFFER_ACCEPTED\x10\x06\x12\x0b\n\x07STARTED\x10\x07\x42\x86\x01\n\x1f\x63om.google.cloud.talent.v4beta1B\x18\x41pplicationResourceProtoP\x01ZAgoogle.golang.org/genproto/googleapis/cloud/talent/v4beta1;talent\xa2\x02\x03\x43TSb\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_common__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, google_dot_protobuf_dot_wrappers__pb2.DESCRIPTOR, @@ -83,8 +85,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=898, - serialized_end=1042, + serialized_start=961, + serialized_end=1105, ) _sym_db.RegisterEnumDescriptor(_APPLICATION_APPLICATIONSTATE) @@ -129,8 +131,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=1045, - serialized_end=1214, + serialized_start=1108, + 
serialized_end=1277, ) _sym_db.RegisterEnumDescriptor(_APPLICATION_APPLICATIONSTAGE) @@ -175,7 +177,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -193,7 +195,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -265,7 +267,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -337,7 +339,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -409,7 +411,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -427,7 +429,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -439,8 +441,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=252, - serialized_end=1214, + serialized_start=285, + serialized_end=1277, ) _APPLICATION.fields_by_name[ @@ -487,8 +489,8 @@ Required during application update. Resource name assigned to an application by the API. The format is "projects/{project\_ id}/tenants/{tenant\_id}/profiles/{profile\_id}/applications/{ - application\_id}", for example, "projects/api-test- - project/tenants/foo/profiles/bar/applications/baz". + application\_id}". For example, + "projects/foo/tenants/bar/profiles/baz/applications/qux". external_id: Required. Client side application identifier, used to uniquely identify the application. 
The maximum number of allowed @@ -496,21 +498,21 @@ profile: Output only. Resource name of the candidate of this application. The format is "projects/{project\_id}/tenants/{t - enant\_id}/profiles/{profile\_id}", for example, - "projects/api-test-project/tenants/foo/profiles/bar". + enant\_id}/profiles/{profile\_id}". For example, + "projects/foo/tenants/bar/profiles/baz". job: One of either a job or a company is required. Resource name of the job which the candidate applied for. The format is - "projects/{project\_id}/tenants/{tenant\_id}/jobs/{job\_id}", - for example, "projects/api-test-project/tenants/foo/jobs/bar". + "projects/{project\_id}/tenants/{tenant\_id}/jobs/{job\_id}". + For example, "projects/foo/tenants/bar/jobs/baz". company: One of either a job or a company is required. Resource name of the company which the candidate applied for. The format is "projects/{project\_id}/tenants/{tenant\_id}/companies/{compan - y\_id}", for example, "projects/api-test- - project/tenants/foo/companies/bar". + y\_id}". For example, + "projects/foo/tenants/bar/companies/baz". application_date: - Optional. The application date. + The application date. stage: Required. What is the most recent stage of the application (that is, new, screen, send cv, hired, finished work)? This @@ -518,25 +520,23 @@ status, but instead, represents statuses that would be used to indicate to the ML models good / bad matches. state: - Optional. The application state. + The application state. interviews: - Optional. All interviews (screen, onsite, and so on) conducted - as part of this application (includes details such as user - conducting the interview, timestamp, feedback, and so on). + All interviews (screen, onsite, and so on) conducted as part + of this application (includes details such as user conducting + the interview, timestamp, feedback, and so on). referral: - Optional. If the candidate is referred by a employee. + If the candidate is referred by a employee. create_time: Required. 
Reflects the time that the application was created. update_time: - Optional. The last update timestamp. + The last update timestamp. outcome_notes: - Optional. Free text reason behind the recruitement outcome - (for example, reason for withdraw / reject, reason for an - unsuccessful finish, and so on). Number of characters allowed - is 100. + Free text reason behind the recruitement outcome (for example, + reason for withdraw / reject, reason for an unsuccessful + finish, and so on). Number of characters allowed is 100. outcome: - Optional. Outcome positiveness shows how positive the outcome - is. + Outcome positiveness shows how positive the outcome is. is_match: Output only. Indicates whether this job application is a match to application related filters. This value is only applicable @@ -553,4 +553,10 @@ DESCRIPTOR._options = None +_APPLICATION.fields_by_name["external_id"]._options = None +_APPLICATION.fields_by_name["profile"]._options = None +_APPLICATION.fields_by_name["stage"]._options = None +_APPLICATION.fields_by_name["create_time"]._options = None +_APPLICATION.fields_by_name["is_match"]._options = None +_APPLICATION.fields_by_name["job_title_snippet"]._options = None # @@protoc_insertion_point(module_scope) diff --git a/talent/google/cloud/talent_v4beta1/proto/application_service.proto b/talent/google/cloud/talent_v4beta1/proto/application_service.proto index 48e4f9500d22..4ff7d89783ac 100644 --- a/talent/google/cloud/talent_v4beta1/proto/application_service.proto +++ b/talent/google/cloud/talent_v4beta1/proto/application_service.proto @@ -19,6 +19,7 @@ package google.cloud.talent.v4beta1; import "google/api/annotations.proto"; import "google/api/client.proto"; +import "google/api/field_behavior.proto"; import "google/cloud/talent/v4beta1/application.proto"; import "google/cloud/talent/v4beta1/common.proto"; import "google/protobuf/empty.proto"; @@ -84,12 +85,12 @@ message CreateApplicationRequest { // created. 
// // The format is - // "projects/{project_id}/tenants/{tenant_id}/profiles/{profile_id}", for - // example, "projects/test-project/tenants/test-tenant/profiles/test-profile". - string parent = 1; + // "projects/{project_id}/tenants/{tenant_id}/profiles/{profile_id}". + // For example, "projects/foo/tenants/bar/profiles/baz". + string parent = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The application to be created. - Application application = 2; + Application application = 2 [(google.api.field_behavior) = REQUIRED]; } // Request for getting a application by name. @@ -97,20 +98,18 @@ message GetApplicationRequest { // Required. The resource name of the application to be retrieved. // // The format is - // "projects/{project_id}/tenants/{tenant_id}/profiles/{profile_id}/applications/{application_id}", - // for example, - // "projects/test-project/tenants/test-tenant/profiles/test-profile/applications/test-application". - string name = 1; + // "projects/{project_id}/tenants/{tenant_id}/profiles/{profile_id}/applications/{application_id}". + // For example, "projects/foo/tenants/bar/profiles/baz/applications/qux". + string name = 1 [(google.api.field_behavior) = REQUIRED]; } // Request for updating a specified application. message UpdateApplicationRequest { // Required. The application resource to replace the current resource in the // system. - Application application = 1; + Application application = 1 [(google.api.field_behavior) = REQUIRED]; - // Optional but strongly recommended for the best service - // experience. + // Strongly recommended for the best service experience. // // If // [update_mask][google.cloud.talent.v4beta1.UpdateApplicationRequest.update_mask] @@ -129,10 +128,9 @@ message DeleteApplicationRequest { // Required. The resource name of the application to be deleted. 
// // The format is - // "projects/{project_id}/tenants/{tenant_id}/profiles/{profile_id}/applications/{application_id}", - // for example, - // "projects/test-project/tenants/test-tenant/profiles/test-profile/applications/test-application". - string name = 1; + // "projects/{project_id}/tenants/{tenant_id}/profiles/{profile_id}/applications/{application_id}". + // For example, "projects/foo/tenants/bar/profiles/baz/applications/qux". + string name = 1 [(google.api.field_behavior) = REQUIRED]; } // List applications for which the client has ACL visibility. @@ -142,19 +140,17 @@ message ListApplicationsRequest { // // The format is // "projects/{project_id}/tenants/{tenant_id}/profiles/{profile_id}", for - // example, "projects/test-project/tenants/test-tenant/profiles/test-profile". - string parent = 1; + // example, "projects/foo/tenants/bar/profiles/baz". + string parent = 1 [(google.api.field_behavior) = REQUIRED]; - // Optional. The starting indicator from which to return results. + // The starting indicator from which to return results. string page_token = 2; - // Optional. The maximum number of applications to be returned, at most 100. + // The maximum number of applications to be returned, at most 100. // Default is 100 if a non-positive number is provided. int32 page_size = 3; } -// Output only. -// // The List applications response object. message ListApplicationsResponse { // Applications for the current client. 
diff --git a/talent/google/cloud/talent_v4beta1/proto/application_service_pb2.py b/talent/google/cloud/talent_v4beta1/proto/application_service_pb2.py index f1e3e31fbbc6..e388d93af125 100644 --- a/talent/google/cloud/talent_v4beta1/proto/application_service_pb2.py +++ b/talent/google/cloud/talent_v4beta1/proto/application_service_pb2.py @@ -17,6 +17,7 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.cloud.talent_v4beta1.proto import ( application_pb2 as google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_application__pb2, ) @@ -35,11 +36,12 @@ "\n\037com.google.cloud.talent.v4beta1B\027ApplicationServiceProtoP\001ZAgoogle.golang.org/genproto/googleapis/cloud/talent/v4beta1;talent\242\002\003CTS" ), serialized_pb=_b( - '\n;google/cloud/talent_v4beta1/proto/application_service.proto\x12\x1bgoogle.cloud.talent.v4beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x33google/cloud/talent_v4beta1/proto/application.proto\x1a.google/cloud/talent_v4beta1/proto/common.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto"i\n\x18\x43reateApplicationRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12=\n\x0b\x61pplication\x18\x02 \x01(\x0b\x32(.google.cloud.talent.v4beta1.Application"%\n\x15GetApplicationRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"\x8a\x01\n\x18UpdateApplicationRequest\x12=\n\x0b\x61pplication\x18\x01 \x01(\x0b\x32(.google.cloud.talent.v4beta1.Application\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"(\n\x18\x44\x65leteApplicationRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"P\n\x17ListApplicationsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"\xb4\x01\n\x18ListApplicationsResponse\x12>\n\x0c\x61pplications\x18\x01 
\x03(\x0b\x32(.google.cloud.talent.v4beta1.Application\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\x12?\n\x08metadata\x18\x03 \x01(\x0b\x32-.google.cloud.talent.v4beta1.ResponseMetadata2\xc2\x08\n\x12\x41pplicationService\x12\xbf\x01\n\x11\x43reateApplication\x12\x35.google.cloud.talent.v4beta1.CreateApplicationRequest\x1a(.google.cloud.talent.v4beta1.Application"I\x82\xd3\xe4\x93\x02\x43">/v4beta1/{parent=projects/*/tenants/*/profiles/*}/applications:\x01*\x12\xb6\x01\n\x0eGetApplication\x12\x32.google.cloud.talent.v4beta1.GetApplicationRequest\x1a(.google.cloud.talent.v4beta1.Application"F\x82\xd3\xe4\x93\x02@\x12>/v4beta1/{name=projects/*/tenants/*/profiles/*/applications/*}\x12\xcb\x01\n\x11UpdateApplication\x12\x35.google.cloud.talent.v4beta1.UpdateApplicationRequest\x1a(.google.cloud.talent.v4beta1.Application"U\x82\xd3\xe4\x93\x02O2J/v4beta1/{application.name=projects/*/tenants/*/profiles/*/applications/*}:\x01*\x12\xaa\x01\n\x11\x44\x65leteApplication\x12\x35.google.cloud.talent.v4beta1.DeleteApplicationRequest\x1a\x16.google.protobuf.Empty"F\x82\xd3\xe4\x93\x02@*>/v4beta1/{name=projects/*/tenants/*/profiles/*/applications/*}\x12\xc7\x01\n\x10ListApplications\x12\x34.google.cloud.talent.v4beta1.ListApplicationsRequest\x1a\x35.google.cloud.talent.v4beta1.ListApplicationsResponse"F\x82\xd3\xe4\x93\x02@\x12>/v4beta1/{parent=projects/*/tenants/*/profiles/*}/applications\x1al\xca\x41\x13jobs.googleapis.com\xd2\x41Shttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/jobsB\x85\x01\n\x1f\x63om.google.cloud.talent.v4beta1B\x17\x41pplicationServiceProtoP\x01ZAgoogle.golang.org/genproto/googleapis/cloud/talent/v4beta1;talent\xa2\x02\x03\x43TSb\x06proto3' + 
'\n;google/cloud/talent_v4beta1/proto/application_service.proto\x12\x1bgoogle.cloud.talent.v4beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x33google/cloud/talent_v4beta1/proto/application.proto\x1a.google/cloud/talent_v4beta1/proto/common.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto"s\n\x18\x43reateApplicationRequest\x12\x13\n\x06parent\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x42\n\x0b\x61pplication\x18\x02 \x01(\x0b\x32(.google.cloud.talent.v4beta1.ApplicationB\x03\xe0\x41\x02"*\n\x15GetApplicationRequest\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02"\x8f\x01\n\x18UpdateApplicationRequest\x12\x42\n\x0b\x61pplication\x18\x01 \x01(\x0b\x32(.google.cloud.talent.v4beta1.ApplicationB\x03\xe0\x41\x02\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"-\n\x18\x44\x65leteApplicationRequest\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02"U\n\x17ListApplicationsRequest\x12\x13\n\x06parent\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"\xb4\x01\n\x18ListApplicationsResponse\x12>\n\x0c\x61pplications\x18\x01 \x03(\x0b\x32(.google.cloud.talent.v4beta1.Application\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\x12?\n\x08metadata\x18\x03 
\x01(\x0b\x32-.google.cloud.talent.v4beta1.ResponseMetadata2\xc2\x08\n\x12\x41pplicationService\x12\xbf\x01\n\x11\x43reateApplication\x12\x35.google.cloud.talent.v4beta1.CreateApplicationRequest\x1a(.google.cloud.talent.v4beta1.Application"I\x82\xd3\xe4\x93\x02\x43">/v4beta1/{parent=projects/*/tenants/*/profiles/*}/applications:\x01*\x12\xb6\x01\n\x0eGetApplication\x12\x32.google.cloud.talent.v4beta1.GetApplicationRequest\x1a(.google.cloud.talent.v4beta1.Application"F\x82\xd3\xe4\x93\x02@\x12>/v4beta1/{name=projects/*/tenants/*/profiles/*/applications/*}\x12\xcb\x01\n\x11UpdateApplication\x12\x35.google.cloud.talent.v4beta1.UpdateApplicationRequest\x1a(.google.cloud.talent.v4beta1.Application"U\x82\xd3\xe4\x93\x02O2J/v4beta1/{application.name=projects/*/tenants/*/profiles/*/applications/*}:\x01*\x12\xaa\x01\n\x11\x44\x65leteApplication\x12\x35.google.cloud.talent.v4beta1.DeleteApplicationRequest\x1a\x16.google.protobuf.Empty"F\x82\xd3\xe4\x93\x02@*>/v4beta1/{name=projects/*/tenants/*/profiles/*/applications/*}\x12\xc7\x01\n\x10ListApplications\x12\x34.google.cloud.talent.v4beta1.ListApplicationsRequest\x1a\x35.google.cloud.talent.v4beta1.ListApplicationsResponse"F\x82\xd3\xe4\x93\x02@\x12>/v4beta1/{parent=projects/*/tenants/*/profiles/*}/applications\x1al\xca\x41\x13jobs.googleapis.com\xd2\x41Shttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/jobsB\x85\x01\n\x1f\x63om.google.cloud.talent.v4beta1B\x17\x41pplicationServiceProtoP\x01ZAgoogle.golang.org/genproto/googleapis/cloud/talent/v4beta1;talent\xa2\x02\x03\x43TSb\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_application__pb2.DESCRIPTOR, google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_common__pb2.DESCRIPTOR, google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, @@ -70,7 +72,7 @@ containing_type=None, 
is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -88,7 +90,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -100,8 +102,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=311, - serialized_end=416, + serialized_start=344, + serialized_end=459, ) @@ -127,7 +129,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ) ], @@ -139,8 +141,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=418, - serialized_end=455, + serialized_start=461, + serialized_end=503, ) @@ -166,7 +168,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -196,8 +198,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=458, - serialized_end=596, + serialized_start=506, + serialized_end=649, ) @@ -223,7 +225,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ) ], @@ -235,8 +237,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=598, - serialized_end=638, + serialized_start=651, + serialized_end=696, ) @@ -262,7 +264,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -310,8 +312,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=640, - serialized_end=720, + serialized_start=698, + serialized_end=783, ) @@ -385,8 +387,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=723, - serialized_end=903, + 
serialized_start=786, + serialized_end=966, ) _CREATEAPPLICATIONREQUEST.fields_by_name[ @@ -433,9 +435,8 @@ parent: Required. Resource name of the profile under which the application is created. The format is "projects/{project\_id} - /tenants/{tenant\_id}/profiles/{profile\_id}", for example, - "projects/test-project/tenants/test-tenant/profiles/test- - profile". + /tenants/{tenant\_id}/profiles/{profile\_id}". For example, + "projects/foo/tenants/bar/profiles/baz". application: Required. The application to be created. """, @@ -458,8 +459,8 @@ Required. The resource name of the application to be retrieved. The format is "projects/{project\_id}/tenants/{ten ant\_id}/profiles/{profile\_id}/applications/{application\_id} - ", for example, "projects/test-project/tenants/test- - tenant/profiles/test-profile/applications/test-application". + ". For example, + "projects/foo/tenants/bar/profiles/baz/applications/qux". """, # @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.GetApplicationRequest) ), @@ -480,13 +481,13 @@ Required. The application resource to replace the current resource in the system. update_mask: - Optional but strongly recommended for the best service - experience. If [update\_mask][google.cloud.talent.v4beta1.Upd - ateApplicationRequest.update\_mask] is provided, only the - specified fields in [application][google.cloud.talent.v4beta1. - UpdateApplicationRequest.application] are updated. Otherwise - all the fields are updated. A field mask to specify the - application fields to be updated. Only top level fields of + Strongly recommended for the best service experience. If [upd + ate\_mask][google.cloud.talent.v4beta1.UpdateApplicationReques + t.update\_mask] is provided, only the specified fields in [app + lication][google.cloud.talent.v4beta1.UpdateApplicationRequest + .application] are updated. Otherwise all the fields are + updated. A field mask to specify the application fields to be + updated. 
Only top level fields of [Application][google.cloud.talent.v4beta1.Application] are supported. """, @@ -508,9 +509,9 @@ name: Required. The resource name of the application to be deleted. The format is "projects/{project\_id}/tenants/{tenant\_id}/pro - files/{profile\_id}/applications/{application\_id}", for - example, "projects/test-project/tenants/test- - tenant/profiles/test-profile/applications/test-application". + files/{profile\_id}/applications/{application\_id}". For + example, + "projects/foo/tenants/bar/profiles/baz/applications/qux". """, # @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.DeleteApplicationRequest) ), @@ -531,14 +532,12 @@ Required. Resource name of the profile under which the application is created. The format is "projects/{project\_id} /tenants/{tenant\_id}/profiles/{profile\_id}", for example, - "projects/test-project/tenants/test-tenant/profiles/test- - profile". + "projects/foo/tenants/bar/profiles/baz". page_token: - Optional. The starting indicator from which to return results. + The starting indicator from which to return results. page_size: - Optional. The maximum number of applications to be returned, - at most 100. Default is 100 if a non-positive number is - provided. + The maximum number of applications to be returned, at most + 100. Default is 100 if a non-positive number is provided. """, # @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.ListApplicationsRequest) ), @@ -551,9 +550,7 @@ dict( DESCRIPTOR=_LISTAPPLICATIONSRESPONSE, __module__="google.cloud.talent_v4beta1.proto.application_service_pb2", - __doc__="""Output only. - - The List applications response object. + __doc__="""The List applications response object. 
Attributes: @@ -572,6 +569,12 @@ DESCRIPTOR._options = None +_CREATEAPPLICATIONREQUEST.fields_by_name["parent"]._options = None +_CREATEAPPLICATIONREQUEST.fields_by_name["application"]._options = None +_GETAPPLICATIONREQUEST.fields_by_name["name"]._options = None +_UPDATEAPPLICATIONREQUEST.fields_by_name["application"]._options = None +_DELETEAPPLICATIONREQUEST.fields_by_name["name"]._options = None +_LISTAPPLICATIONSREQUEST.fields_by_name["parent"]._options = None _APPLICATIONSERVICE = _descriptor.ServiceDescriptor( name="ApplicationService", @@ -581,8 +584,8 @@ serialized_options=_b( "\312A\023jobs.googleapis.com\322AShttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/jobs" ), - serialized_start=906, - serialized_end=1996, + serialized_start=969, + serialized_end=2059, methods=[ _descriptor.MethodDescriptor( name="CreateApplication", diff --git a/talent/google/cloud/talent_v4beta1/proto/batch.proto b/talent/google/cloud/talent_v4beta1/proto/batch.proto index f67c980c79dc..577a304d34fb 100644 --- a/talent/google/cloud/talent_v4beta1/proto/batch.proto +++ b/talent/google/cloud/talent_v4beta1/proto/batch.proto @@ -17,6 +17,8 @@ syntax = "proto3"; package google.cloud.talent.v4beta1; +import "google/api/annotations.proto"; + option go_package = "google.golang.org/genproto/googleapis/cloud/talent/v4beta1;talent"; option java_multiple_files = true; option java_outer_classname = "BatchProto"; diff --git a/talent/google/cloud/talent_v4beta1/proto/batch_pb2.py b/talent/google/cloud/talent_v4beta1/proto/batch_pb2.py index 975c21e834bf..8570369db736 100644 --- a/talent/google/cloud/talent_v4beta1/proto/batch_pb2.py +++ b/talent/google/cloud/talent_v4beta1/proto/batch_pb2.py @@ -15,6 +15,9 @@ _sym_db = _symbol_database.Default() +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 + + DESCRIPTOR = _descriptor.FileDescriptor( name="google/cloud/talent_v4beta1/proto/batch.proto", package="google.cloud.talent.v4beta1", @@ 
-23,8 +26,9 @@ "\n\037com.google.cloud.talent.v4beta1B\nBatchProtoP\001ZAgoogle.golang.org/genproto/googleapis/cloud/talent/v4beta1;talent\242\002\003CTS" ), serialized_pb=_b( - "\n-google/cloud/talent_v4beta1/proto/batch.proto\x12\x1bgoogle.cloud.talent.v4beta1Bx\n\x1f\x63om.google.cloud.talent.v4beta1B\nBatchProtoP\x01ZAgoogle.golang.org/genproto/googleapis/cloud/talent/v4beta1;talent\xa2\x02\x03\x43TSb\x06proto3" + "\n-google/cloud/talent_v4beta1/proto/batch.proto\x12\x1bgoogle.cloud.talent.v4beta1\x1a\x1cgoogle/api/annotations.protoBx\n\x1f\x63om.google.cloud.talent.v4beta1B\nBatchProtoP\x01ZAgoogle.golang.org/genproto/googleapis/cloud/talent/v4beta1;talent\xa2\x02\x03\x43TSb\x06proto3" ), + dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR], ) diff --git a/talent/google/cloud/talent_v4beta1/proto/common.proto b/talent/google/cloud/talent_v4beta1/proto/common.proto index bf70eb472de2..9cd68549eb42 100644 --- a/talent/google/cloud/talent_v4beta1/proto/common.proto +++ b/talent/google/cloud/talent_v4beta1/proto/common.proto @@ -18,6 +18,7 @@ syntax = "proto3"; package google.cloud.talent.v4beta1; import "google/api/annotations.proto"; +import "google/api/field_behavior.proto"; import "google/protobuf/timestamp.proto"; import "google/protobuf/wrappers.proto"; import "google/type/date.proto"; @@ -41,8 +42,7 @@ message TimestampRange { google.protobuf.Timestamp end_time = 2; } -// Output only. A resource that represents a location with full geographic -// information. +// A resource that represents a location with full geographic information. message Location { // An enum which represents the type of a location. enum LocationType { @@ -478,8 +478,6 @@ enum ContactInfoUsage { SCHOOL = 3; } -// Input only. -// // Option for HTML content sanitization on user input fields, for example, job // description. By setting this option, user can determine whether and how // sanitization is performed on these fields. 
@@ -514,8 +512,6 @@ enum CommuteMethod { CYCLING = 4; } -// Input only. -// // Meta information related to the job searcher or entity // conducting the job search. This information is used to improve the // performance of the service. @@ -571,25 +567,24 @@ message RequestMetadata { // The maximum number of allowed characters is 255. string user_id = 3; - // Optional. If set to `true`, + // Only set when any of // [domain][google.cloud.talent.v4beta1.RequestMetadata.domain], // [session_id][google.cloud.talent.v4beta1.RequestMetadata.session_id] and - // [user_id][google.cloud.talent.v4beta1.RequestMetadata.user_id] are - // optional. Only set when any of these fields isn't available for some - // reason. It is highly recommended not to set this field and provide accurate + // [user_id][google.cloud.talent.v4beta1.RequestMetadata.user_id] isn't + // available for some reason. It is highly recommended not to set this field + // and provide accurate // [domain][google.cloud.talent.v4beta1.RequestMetadata.domain], // [session_id][google.cloud.talent.v4beta1.RequestMetadata.session_id] and // [user_id][google.cloud.talent.v4beta1.RequestMetadata.user_id] for the best // service experience. bool allow_missing_ids = 4; - // Optional. The type of device used by the job seeker at the time of the call - // to the service. + // The type of device used by the job seeker at the time of the call to the + // service. DeviceInfo device_info = 5; } -// Output only. Additional information returned to client, such as debugging -// information. +// Additional information returned to client, such as debugging information. message ResponseMetadata { // A unique id associated with this call. // This id is logged for tracking purposes. @@ -627,17 +622,17 @@ message DeviceInfo { OTHER = 6; } - // Optional. Type of the device. + // Type of the device. DeviceType device_type = 1; - // Optional. A device-specific ID. The ID must be a unique identifier that + // A device-specific ID. 
The ID must be a unique identifier that // distinguishes the device from other devices. string id = 2; } // Custom attribute values that are either filterable or non-filterable. message CustomAttribute { - // Optional but exactly one of + // Exactly one of // [string_values][google.cloud.talent.v4beta1.CustomAttribute.string_values] // or [long_values][google.cloud.talent.v4beta1.CustomAttribute.long_values] // must be specified. @@ -652,7 +647,7 @@ message CustomAttribute { // Empty string isn't allowed. repeated string string_values = 1; - // Optional but exactly one of + // Exactly one of // [string_values][google.cloud.talent.v4beta1.CustomAttribute.string_values] // or [long_values][google.cloud.talent.v4beta1.CustomAttribute.long_values] // must be specified. @@ -665,14 +660,14 @@ message CustomAttribute { // supported. repeated int64 long_values = 2; - // Optional. If the `filterable` flag is true, custom field values are - // searchable. If false, values are not searchable. + // If the `filterable` flag is true, custom field values are searchable. + // If false, values are not searchable. // // Default is false. bool filterable = 3; } -// Output only. Spell check result. +// Spell check result. message SpellingCorrection { // Indicates if the query was corrected by the spell checker. bool corrected = 1; @@ -709,35 +704,33 @@ message CompensationInfo { // times // [expected_units_per_year][google.cloud.talent.v4beta1.CompensationInfo.CompensationEntry.expected_units_per_year]. message CompensationEntry { - // Optional. Compensation type. + // Compensation type. // // Default is // [CompensationType.COMPENSATION_TYPE_UNSPECIFIED][google.cloud.talent.v4beta1.CompensationInfo.CompensationType.COMPENSATION_TYPE_UNSPECIFIED]. CompensationType type = 1; - // Optional. Frequency of the specified amount. + // Frequency of the specified amount. 
// // Default is // [CompensationUnit.COMPENSATION_UNIT_UNSPECIFIED][google.cloud.talent.v4beta1.CompensationInfo.CompensationUnit.COMPENSATION_UNIT_UNSPECIFIED]. CompensationUnit unit = 2; - // Optional. - // // Compensation amount. It could be a fixed amount or a floating range. oneof compensation_amount { - // Optional. Compensation amount. + // Compensation amount. google.type.Money amount = 3; - // Optional. Compensation range. + // Compensation range. CompensationRange range = 4; } - // Optional. Compensation description. For example, could + // Compensation description. For example, could // indicate equity terms or provide additional context to an estimated // bonus. string description = 5; - // Optional. Expected number of units paid each year. If not specified, when + // Expected number of units paid each year. If not specified, when // [Job.employment_types][google.cloud.talent.v4beta1.Job.employment_types] // is FULLTIME, a default value is inferred based on // [unit][google.cloud.talent.v4beta1.CompensationInfo.CompensationEntry.unit]. @@ -752,14 +745,15 @@ message CompensationInfo { // Compensation range. message CompensationRange { - // Optional. The maximum amount of compensation. If left empty, the value is - // set to a maximal compensation value and the currency code is set to match - // the [currency code][google.type.Money.currency_code] of min_compensation. + // The maximum amount of compensation. If left empty, the value is set + // to a maximal compensation value and the currency code is set to + // match the [currency code][google.type.Money.currency_code] of + // min_compensation. google.type.Money max_compensation = 2; - // Optional. The minimum amount of compensation. If left empty, the value is - // set to zero and the currency code is set to match the [currency - // code][google.type.Money.currency_code] of max_compensation. + // The minimum amount of compensation. 
If left empty, the value is set + // to zero and the currency code is set to match the + // [currency code][google.type.Money.currency_code] of max_compensation. google.type.Money min_compensation = 1; } @@ -849,15 +843,15 @@ message CompensationInfo { OTHER_COMPENSATION_UNIT = 7; } - // Optional. Job compensation information. + // Job compensation information. // // At most one entry can be of type // [CompensationInfo.CompensationType.BASE][google.cloud.talent.v4beta1.CompensationInfo.CompensationType.BASE], // which is referred as **base compensation entry** for the job. repeated CompensationEntry entries = 1; - // Output only. Annualized base compensation range. Computed as - // base compensation entry's + // Output only. Annualized base compensation range. Computed as base + // compensation entry's // [CompensationEntry.amount][google.cloud.talent.v4beta1.CompensationInfo.CompensationEntry.amount] // times // [CompensationEntry.expected_units_per_year][google.cloud.talent.v4beta1.CompensationInfo.CompensationEntry.expected_units_per_year]. @@ -865,10 +859,11 @@ message CompensationInfo { // See // [CompensationEntry][google.cloud.talent.v4beta1.CompensationInfo.CompensationEntry] // for explanation on compensation annualization. - CompensationRange annualized_base_compensation_range = 2; + CompensationRange annualized_base_compensation_range = 2 + [(google.api.field_behavior) = OUTPUT_ONLY]; - // Output only. Annualized total compensation range. Computed as - // all compensation entries' + // Output only. Annualized total compensation range. Computed as all + // compensation entries' // [CompensationEntry.amount][google.cloud.talent.v4beta1.CompensationInfo.CompensationEntry.amount] // times // [CompensationEntry.expected_units_per_year][google.cloud.talent.v4beta1.CompensationInfo.CompensationEntry.expected_units_per_year]. 
@@ -876,28 +871,29 @@ message CompensationInfo { // See // [CompensationEntry][google.cloud.talent.v4beta1.CompensationInfo.CompensationEntry] // for explanation on compensation annualization. - CompensationRange annualized_total_compensation_range = 3; + CompensationRange annualized_total_compensation_range = 3 + [(google.api.field_behavior) = OUTPUT_ONLY]; } // Resource that represents a license or certification. message Certification { - // Optional. Name of license or certification. + // Name of license or certification. // // Number of characters allowed is 100. string display_name = 1; - // Optional. Acquisition date or effective date of license or certification. + // Acquisition date or effective date of license or certification. google.type.Date acquire_date = 2; - // Optional. Expiration date of license of certification. + // Expiration date of license of certification. google.type.Date expire_date = 3; - // Optional. Authority of license, such as government. + // Authority of license, such as government. // // Number of characters allowed is 100. string authority = 4; - // Optional. Description of license or certification. + // Description of license or certification. // // Number of characters allowed is 100,000. string description = 5; @@ -905,21 +901,21 @@ message Certification { // Resource that represents a skill of a candidate. message Skill { - // Optional. Skill display name. + // Skill display name. // // For example, "Java", "Python". // // Number of characters allowed is 100. string display_name = 1; - // Optional. The last time this skill was used. + // The last time this skill was used. google.type.Date last_used_date = 2; - // Optional. Skill proficiency level which indicates how proficient the - // candidate is at this skill. + // Skill proficiency level which indicates how proficient the candidate is at + // this skill. SkillProficiencyLevel level = 3; - // Optional. A paragraph describes context of this skill. 
+ // A paragraph describes context of this skill. // // Number of characters allowed is 100,000. string context = 4; @@ -929,17 +925,17 @@ message Skill { // to a search query. It's empty if the // [display_name][google.cloud.talent.v4beta1.Skill.display_name] isn't // related to the search query. - string skill_name_snippet = 5; + string skill_name_snippet = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; } // Details of an interview. message Interview { - // Optional. The rating on this interview. + // The rating on this interview. Rating rating = 6; // Required. The overall decision resulting from this interview (positive, // negative, nuetral). - Outcome outcome = 7; + Outcome outcome = 7 [(google.api.field_behavior) = REQUIRED]; } // The details of the score received for an assessment or interview. @@ -958,8 +954,6 @@ message Rating { double interval = 4; } -// Output only. -// // Metadata used for long running operations returned by CTS batch APIs. // It's used to replace // [google.longrunning.Operation.metadata][google.longrunning.Operation.metadata]. @@ -1072,3 +1066,97 @@ enum Outcome { // candidate did not complete assessment). OUTCOME_NOT_AVAILABLE = 4; } + +// The type of candidate availability signal. +enum AvailabilitySignalType { + // Default value. + AVAILABILITY_SIGNAL_TYPE_UNSPECIFIED = 0; + + // Job application signal. + // + // In the context of + // [Profile.availability_signals][google.cloud.talent.v4beta1.Profile.availability_signals], + // this signal is related to the candidate's most recent application. 
+ // [last_update_time][Profile.availability_signals.last_update_time] is + // calculated from + // max([Application.create_time][google.cloud.talent.v4beta1.Application.create_time]) + // from all [Application][google.cloud.talent.v4beta1.Application] records + // where [Application.source][google.cloud.talent.v4beta1.Application.source] + // is any of the following: + // [APPLY_DIRECT_WEB][google.cloud.talent.v4beta1.Application.ApplicationSource.APPLY_DIRECT_WEB] + // [APPLY_DIRECT_MOBILE_WEB][google.cloud.talent.v4beta1.Application.ApplicationSource.APPLY_DIRECT_MOBILE_WEB] + // [APPLY_DIRECT_MOBILE_APP][google.cloud.talent.v4beta1.Application.ApplicationSource.APPLY_DIRECT_MOBILE_APP] + // [APPLY_DIRECT_IN_PERSON][google.cloud.talent.v4beta1.Application.ApplicationSource.APPLY_DIRECT_IN_PERSON] + // [APPLY_INDIRECT][google.cloud.talent.v4beta1.Application.ApplicationSource.APPLY_INDIRECT] + // + // In the context of + // [AvailabilityFilter][google.cloud.talent.v4beta1.AvailabilityFilter], the + // filter is applied on + // [Profile.availability_signals][google.cloud.talent.v4beta1.Profile.availability_signals] + // where [type][Profile.availability_signals.type] is JOB_APPLICATION. + JOB_APPLICATION = 1; + + // Resume update signal. + // + // In the context of + // [Profile.availability_signals][google.cloud.talent.v4beta1.Profile.availability_signals], + // this signal is related to the candidate’s most recent update to their + // resume. For a + // [SummarizedProfile.summary][google.cloud.talent.v4beta1.SummarizedProfile.summary], + // [last_update_time][Profile.availability_signals.last_update_time] is + // calculated from + // max([Profile.resume_update_time][google.cloud.talent.v4beta1.Profile.resume_update_time]) + // from all + // [SummarizedProfile.profiles][google.cloud.talent.v4beta1.SummarizedProfile.profiles]. 
+ // + // In the context of + // [AvailabilityFilter][google.cloud.talent.v4beta1.AvailabilityFilter], the + // filter is applied on + // [Profile.availability_signals][google.cloud.talent.v4beta1.Profile.availability_signals] + // where [type][Profile.availability_signals.type] is RESUME_UPDATE. + RESUME_UPDATE = 2; + + // Candidate update signal. + // + // In the context of + // [Profile.availability_signals][google.cloud.talent.v4beta1.Profile.availability_signals], + // this signal is related to the candidate’s most recent update to their + // profile. For a + // [SummarizedProfile.summary][google.cloud.talent.v4beta1.SummarizedProfile.summary], + // [last_update_time][Profile.availability_signals.last_update_time] is + // calculated from + // max([Profile.candidate_update_time][google.cloud.talent.v4beta1.Profile.candidate_update_time]) + // from all + // [SummarizedProfile.profiles][google.cloud.talent.v4beta1.SummarizedProfile.profiles]. + // + // In the context of + // [AvailabilityFilter][google.cloud.talent.v4beta1.AvailabilityFilter], the + // filter is applied on + // [Profile.availability_signals][google.cloud.talent.v4beta1.Profile.availability_signals] + // where [type][Profile.availability_signals.type] is CANDIDATE_UPDATE. + CANDIDATE_UPDATE = 3; + + // Client submission signal. + // + // In the context of + // [Profile.availability_signals][google.cloud.talent.v4beta1.Profile.availability_signals], + // this signal is related to the candidate’s most recent submission. 
+ // [last_update_time][Profile.availability_signals.last_update_time] is + // calculated from + // max([Application.create_time][google.cloud.talent.v4beta1.Application.create_time]) + // from all [Application][google.cloud.talent.v4beta1.Application] records + // where [Application.stage][google.cloud.talent.v4beta1.Application.stage] is + // any of the following: + // [HIRING_MANAGER_REVIEW][google.cloud.talent.v4beta1.Application.ApplicationStage.HIRING_MANAGER_REVIEW] + // [INTERVIEW][google.cloud.talent.v4beta1.Application.ApplicationStage.INTERVIEW] + // [OFFER_EXTENDED][google.cloud.talent.v4beta1.Application.ApplicationStage.OFFER_EXTENDED] + // [OFFER_ACCEPTED][google.cloud.talent.v4beta1.Application.ApplicationStage.OFFER_ACCEPTED] + // [STARTED][google.cloud.talent.v4beta1.Application.ApplicationStage.STARTED] + // + // In the context of + // [AvailabilityFilter][google.cloud.talent.v4beta1.AvailabilityFilter], the + // filter is applied on + // [Profile.availability_signals][google.cloud.talent.v4beta1.Profile.availability_signals] + // where [type][Profile.availability_signals.type] is CLIENT_SUBMISSION. 
+ CLIENT_SUBMISSION = 4; +} diff --git a/talent/google/cloud/talent_v4beta1/proto/common_pb2.py b/talent/google/cloud/talent_v4beta1/proto/common_pb2.py index 9be63181965b..617c1649876d 100644 --- a/talent/google/cloud/talent_v4beta1/proto/common_pb2.py +++ b/talent/google/cloud/talent_v4beta1/proto/common_pb2.py @@ -17,6 +17,7 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 from google.type import date_pb2 as google_dot_type_dot_date__pb2 @@ -34,10 +35,11 @@ "\n\037com.google.cloud.talent.v4beta1B\013CommonProtoP\001ZAgoogle.golang.org/genproto/googleapis/cloud/talent/v4beta1;talent\242\002\003CTS" ), serialized_pb=_b( - '\n.google/cloud/talent_v4beta1/proto/common.proto\x12\x1bgoogle.cloud.talent.v4beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x16google/type/date.proto\x1a\x18google/type/latlng.proto\x1a\x17google/type/money.proto\x1a google/type/postal_address.proto\x1a\x1bgoogle/type/timeofday.proto"n\n\x0eTimestampRange\x12.\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"\xb7\x03\n\x08Location\x12I\n\rlocation_type\x18\x01 \x01(\x0e\x32\x32.google.cloud.talent.v4beta1.Location.LocationType\x12\x32\n\x0epostal_address\x18\x02 \x01(\x0b\x32\x1a.google.type.PostalAddress\x12$\n\x07lat_lng\x18\x03 \x01(\x0b\x32\x13.google.type.LatLng\x12\x14\n\x0cradius_miles\x18\x04 
\x01(\x01"\xef\x01\n\x0cLocationType\x12\x1d\n\x19LOCATION_TYPE_UNSPECIFIED\x10\x00\x12\x0b\n\x07\x43OUNTRY\x10\x01\x12\x17\n\x13\x41\x44MINISTRATIVE_AREA\x10\x02\x12\x1b\n\x17SUB_ADMINISTRATIVE_AREA\x10\x03\x12\x0c\n\x08LOCALITY\x10\x04\x12\x0f\n\x0bPOSTAL_CODE\x10\x05\x12\x10\n\x0cSUB_LOCALITY\x10\x06\x12\x12\n\x0eSUB_LOCALITY_1\x10\x07\x12\x12\n\x0eSUB_LOCALITY_2\x10\x08\x12\x10\n\x0cNEIGHBORHOOD\x10\t\x12\x12\n\x0eSTREET_ADDRESS\x10\n"\x9f\x01\n\x0fRequestMetadata\x12\x0e\n\x06\x64omain\x18\x01 \x01(\t\x12\x12\n\nsession_id\x18\x02 \x01(\t\x12\x0f\n\x07user_id\x18\x03 \x01(\t\x12\x19\n\x11\x61llow_missing_ids\x18\x04 \x01(\x08\x12<\n\x0b\x64\x65vice_info\x18\x05 \x01(\x0b\x32\'.google.cloud.talent.v4beta1.DeviceInfo"&\n\x10ResponseMetadata\x12\x12\n\nrequest_id\x18\x01 \x01(\t"\xcf\x01\n\nDeviceInfo\x12G\n\x0b\x64\x65vice_type\x18\x01 \x01(\x0e\x32\x32.google.cloud.talent.v4beta1.DeviceInfo.DeviceType\x12\n\n\x02id\x18\x02 \x01(\t"l\n\nDeviceType\x12\x1b\n\x17\x44\x45VICE_TYPE_UNSPECIFIED\x10\x00\x12\x07\n\x03WEB\x10\x01\x12\x0e\n\nMOBILE_WEB\x10\x02\x12\x0b\n\x07\x41NDROID\x10\x03\x12\x07\n\x03IOS\x10\x04\x12\x07\n\x03\x42OT\x10\x05\x12\t\n\x05OTHER\x10\x06"Q\n\x0f\x43ustomAttribute\x12\x15\n\rstring_values\x18\x01 \x03(\t\x12\x13\n\x0blong_values\x18\x02 \x03(\x03\x12\x12\n\nfilterable\x18\x03 \x01(\x08"W\n\x12SpellingCorrection\x12\x11\n\tcorrected\x18\x01 \x01(\x08\x12\x16\n\x0e\x63orrected_text\x18\x02 \x01(\t\x12\x16\n\x0e\x63orrected_html\x18\x03 \x01(\t"\x9c\t\n\x10\x43ompensationInfo\x12P\n\x07\x65ntries\x18\x01 \x03(\x0b\x32?.google.cloud.talent.v4beta1.CompensationInfo.CompensationEntry\x12k\n"annualized_base_compensation_range\x18\x02 \x01(\x0b\x32?.google.cloud.talent.v4beta1.CompensationInfo.CompensationRange\x12l\n#annualized_total_compensation_range\x18\x03 \x01(\x0b\x32?.google.cloud.talent.v4beta1.CompensationInfo.CompensationRange\x1a\x92\x03\n\x11\x43ompensationEntry\x12L\n\x04type\x18\x01 
\x01(\x0e\x32>.google.cloud.talent.v4beta1.CompensationInfo.CompensationType\x12L\n\x04unit\x18\x02 \x01(\x0e\x32>.google.cloud.talent.v4beta1.CompensationInfo.CompensationUnit\x12$\n\x06\x61mount\x18\x03 \x01(\x0b\x32\x12.google.type.MoneyH\x00\x12P\n\x05range\x18\x04 \x01(\x0b\x32?.google.cloud.talent.v4beta1.CompensationInfo.CompensationRangeH\x00\x12\x13\n\x0b\x64\x65scription\x18\x05 \x01(\t\x12=\n\x17\x65xpected_units_per_year\x18\x06 \x01(\x0b\x32\x1c.google.protobuf.DoubleValueB\x15\n\x13\x63ompensation_amount\x1ao\n\x11\x43ompensationRange\x12,\n\x10max_compensation\x18\x02 \x01(\x0b\x32\x12.google.type.Money\x12,\n\x10min_compensation\x18\x01 \x01(\x0b\x32\x12.google.type.Money"\xb5\x01\n\x10\x43ompensationType\x12!\n\x1d\x43OMPENSATION_TYPE_UNSPECIFIED\x10\x00\x12\x08\n\x04\x42\x41SE\x10\x01\x12\t\n\x05\x42ONUS\x10\x02\x12\x11\n\rSIGNING_BONUS\x10\x03\x12\n\n\x06\x45QUITY\x10\x04\x12\x12\n\x0ePROFIT_SHARING\x10\x05\x12\x0f\n\x0b\x43OMMISSIONS\x10\x06\x12\x08\n\x04TIPS\x10\x07\x12\x1b\n\x17OTHER_COMPENSATION_TYPE\x10\x08"\x9c\x01\n\x10\x43ompensationUnit\x12!\n\x1d\x43OMPENSATION_UNIT_UNSPECIFIED\x10\x00\x12\n\n\x06HOURLY\x10\x01\x12\t\n\x05\x44\x41ILY\x10\x02\x12\n\n\x06WEEKLY\x10\x03\x12\x0b\n\x07MONTHLY\x10\x04\x12\n\n\x06YEARLY\x10\x05\x12\x0c\n\x08ONE_TIME\x10\x06\x12\x1b\n\x17OTHER_COMPENSATION_UNIT\x10\x07"\x9e\x01\n\rCertification\x12\x14\n\x0c\x64isplay_name\x18\x01 \x01(\t\x12\'\n\x0c\x61\x63quire_date\x18\x02 \x01(\x0b\x32\x11.google.type.Date\x12&\n\x0b\x65xpire_date\x18\x03 \x01(\x0b\x32\x11.google.type.Date\x12\x11\n\tauthority\x18\x04 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x05 \x01(\t"\xb8\x01\n\x05Skill\x12\x14\n\x0c\x64isplay_name\x18\x01 \x01(\t\x12)\n\x0elast_used_date\x18\x02 \x01(\x0b\x32\x11.google.type.Date\x12\x41\n\x05level\x18\x03 \x01(\x0e\x32\x32.google.cloud.talent.v4beta1.SkillProficiencyLevel\x12\x0f\n\x07\x63ontext\x18\x04 \x01(\t\x12\x1a\n\x12skill_name_snippet\x18\x05 \x01(\t"w\n\tInterview\x12\x33\n\x06rating\x18\x06 
\x01(\x0b\x32#.google.cloud.talent.v4beta1.Rating\x12\x35\n\x07outcome\x18\x07 \x01(\x0e\x32$.google.cloud.talent.v4beta1.Outcome"E\n\x06Rating\x12\x0f\n\x07overall\x18\x01 \x01(\x01\x12\x0b\n\x03min\x18\x02 \x01(\x01\x12\x0b\n\x03max\x18\x03 \x01(\x01\x12\x10\n\x08interval\x18\x04 \x01(\x01"\xcc\x03\n\x16\x42\x61tchOperationMetadata\x12H\n\x05state\x18\x01 \x01(\x0e\x32\x39.google.cloud.talent.v4beta1.BatchOperationMetadata.State\x12\x19\n\x11state_description\x18\x02 \x01(\t\x12\x15\n\rsuccess_count\x18\x03 \x01(\x05\x12\x15\n\rfailure_count\x18\x04 \x01(\x05\x12\x13\n\x0btotal_count\x18\x05 \x01(\x05\x12/\n\x0b\x63reate_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"z\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x10\n\x0cINITIALIZING\x10\x01\x12\x0e\n\nPROCESSING\x10\x02\x12\r\n\tSUCCEEDED\x10\x03\x12\n\n\x06\x46\x41ILED\x10\x04\x12\x0e\n\nCANCELLING\x10\x05\x12\r\n\tCANCELLED\x10\x06*y\n\x0b\x43ompanySize\x12\x1c\n\x18\x43OMPANY_SIZE_UNSPECIFIED\x10\x00\x12\x08\n\x04MINI\x10\x01\x12\t\n\x05SMALL\x10\x02\x12\x0b\n\x07SMEDIUM\x10\x03\x12\n\n\x06MEDIUM\x10\x04\x12\x07\n\x03\x42IG\x10\x05\x12\n\n\x06\x42IGGER\x10\x06\x12\t\n\x05GIANT\x10\x07*\xe2\x01\n\nJobBenefit\x12\x1b\n\x17JOB_BENEFIT_UNSPECIFIED\x10\x00\x12\x0e\n\nCHILD_CARE\x10\x01\x12\n\n\x06\x44\x45NTAL\x10\x02\x12\x14\n\x10\x44OMESTIC_PARTNER\x10\x03\x12\x12\n\x0e\x46LEXIBLE_HOURS\x10\x04\x12\x0b\n\x07MEDICAL\x10\x05\x12\x12\n\x0eLIFE_INSURANCE\x10\x06\x12\x12\n\x0ePARENTAL_LEAVE\x10\x07\x12\x13\n\x0fRETIREMENT_PLAN\x10\x08\x12\r\n\tSICK_DAYS\x10\t\x12\x0c\n\x08VACATION\x10\n\x12\n\n\x06VISION\x10\x0b*\x8e\x02\n\nDegreeType\x12\x1b\n\x17\x44\x45GREE_TYPE_UNSPECIFIED\x10\x00\x12\x15\n\x11PRIMARY_EDUCATION\x10\x01\x12\x1d\n\x19LOWER_SECONDARY_EDUCATION\x10\x02\x12\x1d\n\x19UPPER_SECONDARY_EDUCATION\x10\x03\x12\x1c\n\x18\x41\x44ULT_REMEDIAL_EDUCA
TION\x10\x04\x12\x1c\n\x18\x41SSOCIATES_OR_EQUIVALENT\x10\x05\x12\x1b\n\x17\x42\x41\x43HELORS_OR_EQUIVALENT\x10\x06\x12\x19\n\x15MASTERS_OR_EQUIVALENT\x10\x07\x12\x1a\n\x16\x44OCTORAL_OR_EQUIVALENT\x10\x08*\xdc\x01\n\x0e\x45mploymentType\x12\x1f\n\x1b\x45MPLOYMENT_TYPE_UNSPECIFIED\x10\x00\x12\r\n\tFULL_TIME\x10\x01\x12\r\n\tPART_TIME\x10\x02\x12\x0e\n\nCONTRACTOR\x10\x03\x12\x14\n\x10\x43ONTRACT_TO_HIRE\x10\x04\x12\r\n\tTEMPORARY\x10\x05\x12\n\n\x06INTERN\x10\x06\x12\r\n\tVOLUNTEER\x10\x07\x12\x0c\n\x08PER_DIEM\x10\x08\x12\x12\n\x0e\x46LY_IN_FLY_OUT\x10\t\x12\x19\n\x15OTHER_EMPLOYMENT_TYPE\x10\n*q\n\x08JobLevel\x12\x19\n\x15JOB_LEVEL_UNSPECIFIED\x10\x00\x12\x0f\n\x0b\x45NTRY_LEVEL\x10\x01\x12\x0f\n\x0b\x45XPERIENCED\x10\x02\x12\x0b\n\x07MANAGER\x10\x03\x12\x0c\n\x08\x44IRECTOR\x10\x04\x12\r\n\tEXECUTIVE\x10\x05*\xba\x06\n\x0bJobCategory\x12\x1c\n\x18JOB_CATEGORY_UNSPECIFIED\x10\x00\x12\x1a\n\x16\x41\x43\x43OUNTING_AND_FINANCE\x10\x01\x12\x1d\n\x19\x41\x44MINISTRATIVE_AND_OFFICE\x10\x02\x12\x1d\n\x19\x41\x44VERTISING_AND_MARKETING\x10\x03\x12\x0f\n\x0b\x41NIMAL_CARE\x10\x04\x12\x1a\n\x16\x41RT_FASHION_AND_DESIGN\x10\x05\x12\x17\n\x13\x42USINESS_OPERATIONS\x10\x06\x12\x1b\n\x17\x43LEANING_AND_FACILITIES\x10\x07\x12\x13\n\x0f\x43OMPUTER_AND_IT\x10\x08\x12\x10\n\x0c\x43ONSTRUCTION\x10\t\x12\x14\n\x10\x43USTOMER_SERVICE\x10\n\x12\r\n\tEDUCATION\x10\x0b\x12\x1c\n\x18\x45NTERTAINMENT_AND_TRAVEL\x10\x0c\x12\x18\n\x14\x46\x41RMING_AND_OUTDOORS\x10\r\x12\x0e\n\nHEALTHCARE\x10\x0e\x12\x13\n\x0fHUMAN_RESOURCES\x10\x0f\x12\'\n#INSTALLATION_MAINTENANCE_AND_REPAIR\x10\x10\x12\t\n\x05LEGAL\x10\x11\x12\x0e\n\nMANAGEMENT\x10\x12\x12\x1f\n\x1bMANUFACTURING_AND_WAREHOUSE\x10\x13\x12$\n 
MEDIA_COMMUNICATIONS_AND_WRITING\x10\x14\x12\x16\n\x12OIL_GAS_AND_MINING\x10\x15\x12\x1e\n\x1aPERSONAL_CARE_AND_SERVICES\x10\x16\x12\x17\n\x13PROTECTIVE_SERVICES\x10\x17\x12\x0f\n\x0bREAL_ESTATE\x10\x18\x12\x1e\n\x1aRESTAURANT_AND_HOSPITALITY\x10\x19\x12\x14\n\x10SALES_AND_RETAIL\x10\x1a\x12\x1b\n\x17SCIENCE_AND_ENGINEERING\x10\x1b\x12"\n\x1eSOCIAL_SERVICES_AND_NON_PROFIT\x10\x1c\x12!\n\x1dSPORTS_FITNESS_AND_RECREATION\x10\x1d\x12 \n\x1cTRANSPORTATION_AND_LOGISTICS\x10\x1e*e\n\rPostingRegion\x12\x1e\n\x1aPOSTING_REGION_UNSPECIFIED\x10\x00\x12\x17\n\x13\x41\x44MINISTRATIVE_AREA\x10\x01\x12\n\n\x06NATION\x10\x02\x12\x0f\n\x0bTELECOMMUTE\x10\x03*n\n\nVisibility\x12\x1a\n\x16VISIBILITY_UNSPECIFIED\x10\x00\x12\x10\n\x0c\x41\x43\x43OUNT_ONLY\x10\x01\x12\x16\n\x12SHARED_WITH_GOOGLE\x10\x02\x12\x16\n\x12SHARED_WITH_PUBLIC\x10\x03\x1a\x02\x18\x01*Z\n\x10\x43ontactInfoUsage\x12"\n\x1e\x43ONTACT_INFO_USAGE_UNSPECIFIED\x10\x00\x12\x0c\n\x08PERSONAL\x10\x01\x12\x08\n\x04WORK\x10\x02\x12\n\n\x06SCHOOL\x10\x03*q\n\x10HtmlSanitization\x12!\n\x1dHTML_SANITIZATION_UNSPECIFIED\x10\x00\x12\x1e\n\x1aHTML_SANITIZATION_DISABLED\x10\x01\x12\x1a\n\x16SIMPLE_FORMATTING_ONLY\x10\x02*c\n\rCommuteMethod\x12\x1e\n\x1a\x43OMMUTE_METHOD_UNSPECIFIED\x10\x00\x12\x0b\n\x07\x44RIVING\x10\x01\x12\x0b\n\x07TRANSIT\x10\x02\x12\x0b\n\x07WALKING\x10\x03\x12\x0b\n\x07\x43YCLING\x10\x04*\xa2\x01\n\x15SkillProficiencyLevel\x12\'\n#SKILL_PROFICIENCY_LEVEL_UNSPECIFIED\x10\x00\x12\r\n\tUNSKILLED\x10\x06\x12\x19\n\x15\x46UNDAMENTAL_AWARENESS\x10\x01\x12\n\n\x06NOVICE\x10\x02\x12\x10\n\x0cINTERMEDIATE\x10\x03\x12\x0c\n\x08\x41\x44VANCED\x10\x04\x12\n\n\x06\x45XPERT\x10\x05*f\n\x07Outcome\x12\x17\n\x13OUTCOME_UNSPECIFIED\x10\x00\x12\x0c\n\x08POSITIVE\x10\x01\x12\x0b\n\x07NEUTRAL\x10\x02\x12\x0c\n\x08NEGATIVE\x10\x03\x12\x19\n\x15OUTCOME_NOT_AVAILABLE\x10\x04\x42y\n\x1f\x63om.google.cloud.talent.v4beta1B\x0b\x43ommonProtoP\x01ZAgoogle.golang.org/genproto/googleapis/cloud/talent/v4beta1;talent\xa2\x02\x03\x43TSb\x06p
roto3' + '\n.google/cloud/talent_v4beta1/proto/common.proto\x12\x1bgoogle.cloud.talent.v4beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x16google/type/date.proto\x1a\x18google/type/latlng.proto\x1a\x17google/type/money.proto\x1a google/type/postal_address.proto\x1a\x1bgoogle/type/timeofday.proto"n\n\x0eTimestampRange\x12.\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"\xb7\x03\n\x08Location\x12I\n\rlocation_type\x18\x01 \x01(\x0e\x32\x32.google.cloud.talent.v4beta1.Location.LocationType\x12\x32\n\x0epostal_address\x18\x02 \x01(\x0b\x32\x1a.google.type.PostalAddress\x12$\n\x07lat_lng\x18\x03 \x01(\x0b\x32\x13.google.type.LatLng\x12\x14\n\x0cradius_miles\x18\x04 \x01(\x01"\xef\x01\n\x0cLocationType\x12\x1d\n\x19LOCATION_TYPE_UNSPECIFIED\x10\x00\x12\x0b\n\x07\x43OUNTRY\x10\x01\x12\x17\n\x13\x41\x44MINISTRATIVE_AREA\x10\x02\x12\x1b\n\x17SUB_ADMINISTRATIVE_AREA\x10\x03\x12\x0c\n\x08LOCALITY\x10\x04\x12\x0f\n\x0bPOSTAL_CODE\x10\x05\x12\x10\n\x0cSUB_LOCALITY\x10\x06\x12\x12\n\x0eSUB_LOCALITY_1\x10\x07\x12\x12\n\x0eSUB_LOCALITY_2\x10\x08\x12\x10\n\x0cNEIGHBORHOOD\x10\t\x12\x12\n\x0eSTREET_ADDRESS\x10\n"\x9f\x01\n\x0fRequestMetadata\x12\x0e\n\x06\x64omain\x18\x01 \x01(\t\x12\x12\n\nsession_id\x18\x02 \x01(\t\x12\x0f\n\x07user_id\x18\x03 \x01(\t\x12\x19\n\x11\x61llow_missing_ids\x18\x04 \x01(\x08\x12<\n\x0b\x64\x65vice_info\x18\x05 \x01(\x0b\x32\'.google.cloud.talent.v4beta1.DeviceInfo"&\n\x10ResponseMetadata\x12\x12\n\nrequest_id\x18\x01 \x01(\t"\xcf\x01\n\nDeviceInfo\x12G\n\x0b\x64\x65vice_type\x18\x01 \x01(\x0e\x32\x32.google.cloud.talent.v4beta1.DeviceInfo.DeviceType\x12\n\n\x02id\x18\x02 
\x01(\t"l\n\nDeviceType\x12\x1b\n\x17\x44\x45VICE_TYPE_UNSPECIFIED\x10\x00\x12\x07\n\x03WEB\x10\x01\x12\x0e\n\nMOBILE_WEB\x10\x02\x12\x0b\n\x07\x41NDROID\x10\x03\x12\x07\n\x03IOS\x10\x04\x12\x07\n\x03\x42OT\x10\x05\x12\t\n\x05OTHER\x10\x06"Q\n\x0f\x43ustomAttribute\x12\x15\n\rstring_values\x18\x01 \x03(\t\x12\x13\n\x0blong_values\x18\x02 \x03(\x03\x12\x12\n\nfilterable\x18\x03 \x01(\x08"W\n\x12SpellingCorrection\x12\x11\n\tcorrected\x18\x01 \x01(\x08\x12\x16\n\x0e\x63orrected_text\x18\x02 \x01(\t\x12\x16\n\x0e\x63orrected_html\x18\x03 \x01(\t"\xa6\t\n\x10\x43ompensationInfo\x12P\n\x07\x65ntries\x18\x01 \x03(\x0b\x32?.google.cloud.talent.v4beta1.CompensationInfo.CompensationEntry\x12p\n"annualized_base_compensation_range\x18\x02 \x01(\x0b\x32?.google.cloud.talent.v4beta1.CompensationInfo.CompensationRangeB\x03\xe0\x41\x03\x12q\n#annualized_total_compensation_range\x18\x03 \x01(\x0b\x32?.google.cloud.talent.v4beta1.CompensationInfo.CompensationRangeB\x03\xe0\x41\x03\x1a\x92\x03\n\x11\x43ompensationEntry\x12L\n\x04type\x18\x01 \x01(\x0e\x32>.google.cloud.talent.v4beta1.CompensationInfo.CompensationType\x12L\n\x04unit\x18\x02 \x01(\x0e\x32>.google.cloud.talent.v4beta1.CompensationInfo.CompensationUnit\x12$\n\x06\x61mount\x18\x03 \x01(\x0b\x32\x12.google.type.MoneyH\x00\x12P\n\x05range\x18\x04 \x01(\x0b\x32?.google.cloud.talent.v4beta1.CompensationInfo.CompensationRangeH\x00\x12\x13\n\x0b\x64\x65scription\x18\x05 \x01(\t\x12=\n\x17\x65xpected_units_per_year\x18\x06 \x01(\x0b\x32\x1c.google.protobuf.DoubleValueB\x15\n\x13\x63ompensation_amount\x1ao\n\x11\x43ompensationRange\x12,\n\x10max_compensation\x18\x02 \x01(\x0b\x32\x12.google.type.Money\x12,\n\x10min_compensation\x18\x01 
\x01(\x0b\x32\x12.google.type.Money"\xb5\x01\n\x10\x43ompensationType\x12!\n\x1d\x43OMPENSATION_TYPE_UNSPECIFIED\x10\x00\x12\x08\n\x04\x42\x41SE\x10\x01\x12\t\n\x05\x42ONUS\x10\x02\x12\x11\n\rSIGNING_BONUS\x10\x03\x12\n\n\x06\x45QUITY\x10\x04\x12\x12\n\x0ePROFIT_SHARING\x10\x05\x12\x0f\n\x0b\x43OMMISSIONS\x10\x06\x12\x08\n\x04TIPS\x10\x07\x12\x1b\n\x17OTHER_COMPENSATION_TYPE\x10\x08"\x9c\x01\n\x10\x43ompensationUnit\x12!\n\x1d\x43OMPENSATION_UNIT_UNSPECIFIED\x10\x00\x12\n\n\x06HOURLY\x10\x01\x12\t\n\x05\x44\x41ILY\x10\x02\x12\n\n\x06WEEKLY\x10\x03\x12\x0b\n\x07MONTHLY\x10\x04\x12\n\n\x06YEARLY\x10\x05\x12\x0c\n\x08ONE_TIME\x10\x06\x12\x1b\n\x17OTHER_COMPENSATION_UNIT\x10\x07"\x9e\x01\n\rCertification\x12\x14\n\x0c\x64isplay_name\x18\x01 \x01(\t\x12\'\n\x0c\x61\x63quire_date\x18\x02 \x01(\x0b\x32\x11.google.type.Date\x12&\n\x0b\x65xpire_date\x18\x03 \x01(\x0b\x32\x11.google.type.Date\x12\x11\n\tauthority\x18\x04 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x05 \x01(\t"\xbd\x01\n\x05Skill\x12\x14\n\x0c\x64isplay_name\x18\x01 \x01(\t\x12)\n\x0elast_used_date\x18\x02 \x01(\x0b\x32\x11.google.type.Date\x12\x41\n\x05level\x18\x03 \x01(\x0e\x32\x32.google.cloud.talent.v4beta1.SkillProficiencyLevel\x12\x0f\n\x07\x63ontext\x18\x04 \x01(\t\x12\x1f\n\x12skill_name_snippet\x18\x05 \x01(\tB\x03\xe0\x41\x03"|\n\tInterview\x12\x33\n\x06rating\x18\x06 \x01(\x0b\x32#.google.cloud.talent.v4beta1.Rating\x12:\n\x07outcome\x18\x07 \x01(\x0e\x32$.google.cloud.talent.v4beta1.OutcomeB\x03\xe0\x41\x02"E\n\x06Rating\x12\x0f\n\x07overall\x18\x01 \x01(\x01\x12\x0b\n\x03min\x18\x02 \x01(\x01\x12\x0b\n\x03max\x18\x03 \x01(\x01\x12\x10\n\x08interval\x18\x04 \x01(\x01"\xcc\x03\n\x16\x42\x61tchOperationMetadata\x12H\n\x05state\x18\x01 \x01(\x0e\x32\x39.google.cloud.talent.v4beta1.BatchOperationMetadata.State\x12\x19\n\x11state_description\x18\x02 \x01(\t\x12\x15\n\rsuccess_count\x18\x03 \x01(\x05\x12\x15\n\rfailure_count\x18\x04 \x01(\x05\x12\x13\n\x0btotal_count\x18\x05 
\x01(\x05\x12/\n\x0b\x63reate_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"z\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x10\n\x0cINITIALIZING\x10\x01\x12\x0e\n\nPROCESSING\x10\x02\x12\r\n\tSUCCEEDED\x10\x03\x12\n\n\x06\x46\x41ILED\x10\x04\x12\x0e\n\nCANCELLING\x10\x05\x12\r\n\tCANCELLED\x10\x06*y\n\x0b\x43ompanySize\x12\x1c\n\x18\x43OMPANY_SIZE_UNSPECIFIED\x10\x00\x12\x08\n\x04MINI\x10\x01\x12\t\n\x05SMALL\x10\x02\x12\x0b\n\x07SMEDIUM\x10\x03\x12\n\n\x06MEDIUM\x10\x04\x12\x07\n\x03\x42IG\x10\x05\x12\n\n\x06\x42IGGER\x10\x06\x12\t\n\x05GIANT\x10\x07*\xe2\x01\n\nJobBenefit\x12\x1b\n\x17JOB_BENEFIT_UNSPECIFIED\x10\x00\x12\x0e\n\nCHILD_CARE\x10\x01\x12\n\n\x06\x44\x45NTAL\x10\x02\x12\x14\n\x10\x44OMESTIC_PARTNER\x10\x03\x12\x12\n\x0e\x46LEXIBLE_HOURS\x10\x04\x12\x0b\n\x07MEDICAL\x10\x05\x12\x12\n\x0eLIFE_INSURANCE\x10\x06\x12\x12\n\x0ePARENTAL_LEAVE\x10\x07\x12\x13\n\x0fRETIREMENT_PLAN\x10\x08\x12\r\n\tSICK_DAYS\x10\t\x12\x0c\n\x08VACATION\x10\n\x12\n\n\x06VISION\x10\x0b*\x8e\x02\n\nDegreeType\x12\x1b\n\x17\x44\x45GREE_TYPE_UNSPECIFIED\x10\x00\x12\x15\n\x11PRIMARY_EDUCATION\x10\x01\x12\x1d\n\x19LOWER_SECONDARY_EDUCATION\x10\x02\x12\x1d\n\x19UPPER_SECONDARY_EDUCATION\x10\x03\x12\x1c\n\x18\x41\x44ULT_REMEDIAL_EDUCATION\x10\x04\x12\x1c\n\x18\x41SSOCIATES_OR_EQUIVALENT\x10\x05\x12\x1b\n\x17\x42\x41\x43HELORS_OR_EQUIVALENT\x10\x06\x12\x19\n\x15MASTERS_OR_EQUIVALENT\x10\x07\x12\x1a\n\x16\x44OCTORAL_OR_EQUIVALENT\x10\x08*\xdc\x01\n\x0e\x45mploymentType\x12\x1f\n\x1b\x45MPLOYMENT_TYPE_UNSPECIFIED\x10\x00\x12\r\n\tFULL_TIME\x10\x01\x12\r\n\tPART_TIME\x10\x02\x12\x0e\n\nCONTRACTOR\x10\x03\x12\x14\n\x10\x43ONTRACT_TO_HIRE\x10\x04\x12\r\n\tTEMPORARY\x10\x05\x12\n\n\x06INTERN\x10\x06\x12\r\n\tVOLUNTEER\x10\x07\x12\x0c\n\x08PER_DIEM\x10\x08\x12\x12\n\x0e\x46LY_IN_FLY_OUT\x10\t\x12\x19\n\x15OTHER_EMPLOYMENT_TYPE\x10\
n*q\n\x08JobLevel\x12\x19\n\x15JOB_LEVEL_UNSPECIFIED\x10\x00\x12\x0f\n\x0b\x45NTRY_LEVEL\x10\x01\x12\x0f\n\x0b\x45XPERIENCED\x10\x02\x12\x0b\n\x07MANAGER\x10\x03\x12\x0c\n\x08\x44IRECTOR\x10\x04\x12\r\n\tEXECUTIVE\x10\x05*\xba\x06\n\x0bJobCategory\x12\x1c\n\x18JOB_CATEGORY_UNSPECIFIED\x10\x00\x12\x1a\n\x16\x41\x43\x43OUNTING_AND_FINANCE\x10\x01\x12\x1d\n\x19\x41\x44MINISTRATIVE_AND_OFFICE\x10\x02\x12\x1d\n\x19\x41\x44VERTISING_AND_MARKETING\x10\x03\x12\x0f\n\x0b\x41NIMAL_CARE\x10\x04\x12\x1a\n\x16\x41RT_FASHION_AND_DESIGN\x10\x05\x12\x17\n\x13\x42USINESS_OPERATIONS\x10\x06\x12\x1b\n\x17\x43LEANING_AND_FACILITIES\x10\x07\x12\x13\n\x0f\x43OMPUTER_AND_IT\x10\x08\x12\x10\n\x0c\x43ONSTRUCTION\x10\t\x12\x14\n\x10\x43USTOMER_SERVICE\x10\n\x12\r\n\tEDUCATION\x10\x0b\x12\x1c\n\x18\x45NTERTAINMENT_AND_TRAVEL\x10\x0c\x12\x18\n\x14\x46\x41RMING_AND_OUTDOORS\x10\r\x12\x0e\n\nHEALTHCARE\x10\x0e\x12\x13\n\x0fHUMAN_RESOURCES\x10\x0f\x12\'\n#INSTALLATION_MAINTENANCE_AND_REPAIR\x10\x10\x12\t\n\x05LEGAL\x10\x11\x12\x0e\n\nMANAGEMENT\x10\x12\x12\x1f\n\x1bMANUFACTURING_AND_WAREHOUSE\x10\x13\x12$\n MEDIA_COMMUNICATIONS_AND_WRITING\x10\x14\x12\x16\n\x12OIL_GAS_AND_MINING\x10\x15\x12\x1e\n\x1aPERSONAL_CARE_AND_SERVICES\x10\x16\x12\x17\n\x13PROTECTIVE_SERVICES\x10\x17\x12\x0f\n\x0bREAL_ESTATE\x10\x18\x12\x1e\n\x1aRESTAURANT_AND_HOSPITALITY\x10\x19\x12\x14\n\x10SALES_AND_RETAIL\x10\x1a\x12\x1b\n\x17SCIENCE_AND_ENGINEERING\x10\x1b\x12"\n\x1eSOCIAL_SERVICES_AND_NON_PROFIT\x10\x1c\x12!\n\x1dSPORTS_FITNESS_AND_RECREATION\x10\x1d\x12 
\n\x1cTRANSPORTATION_AND_LOGISTICS\x10\x1e*e\n\rPostingRegion\x12\x1e\n\x1aPOSTING_REGION_UNSPECIFIED\x10\x00\x12\x17\n\x13\x41\x44MINISTRATIVE_AREA\x10\x01\x12\n\n\x06NATION\x10\x02\x12\x0f\n\x0bTELECOMMUTE\x10\x03*n\n\nVisibility\x12\x1a\n\x16VISIBILITY_UNSPECIFIED\x10\x00\x12\x10\n\x0c\x41\x43\x43OUNT_ONLY\x10\x01\x12\x16\n\x12SHARED_WITH_GOOGLE\x10\x02\x12\x16\n\x12SHARED_WITH_PUBLIC\x10\x03\x1a\x02\x18\x01*Z\n\x10\x43ontactInfoUsage\x12"\n\x1e\x43ONTACT_INFO_USAGE_UNSPECIFIED\x10\x00\x12\x0c\n\x08PERSONAL\x10\x01\x12\x08\n\x04WORK\x10\x02\x12\n\n\x06SCHOOL\x10\x03*q\n\x10HtmlSanitization\x12!\n\x1dHTML_SANITIZATION_UNSPECIFIED\x10\x00\x12\x1e\n\x1aHTML_SANITIZATION_DISABLED\x10\x01\x12\x1a\n\x16SIMPLE_FORMATTING_ONLY\x10\x02*c\n\rCommuteMethod\x12\x1e\n\x1a\x43OMMUTE_METHOD_UNSPECIFIED\x10\x00\x12\x0b\n\x07\x44RIVING\x10\x01\x12\x0b\n\x07TRANSIT\x10\x02\x12\x0b\n\x07WALKING\x10\x03\x12\x0b\n\x07\x43YCLING\x10\x04*\xa2\x01\n\x15SkillProficiencyLevel\x12\'\n#SKILL_PROFICIENCY_LEVEL_UNSPECIFIED\x10\x00\x12\r\n\tUNSKILLED\x10\x06\x12\x19\n\x15\x46UNDAMENTAL_AWARENESS\x10\x01\x12\n\n\x06NOVICE\x10\x02\x12\x10\n\x0cINTERMEDIATE\x10\x03\x12\x0c\n\x08\x41\x44VANCED\x10\x04\x12\n\n\x06\x45XPERT\x10\x05*f\n\x07Outcome\x12\x17\n\x13OUTCOME_UNSPECIFIED\x10\x00\x12\x0c\n\x08POSITIVE\x10\x01\x12\x0b\n\x07NEUTRAL\x10\x02\x12\x0c\n\x08NEGATIVE\x10\x03\x12\x19\n\x15OUTCOME_NOT_AVAILABLE\x10\x04*\x97\x01\n\x16\x41vailabilitySignalType\x12(\n$AVAILABILITY_SIGNAL_TYPE_UNSPECIFIED\x10\x00\x12\x13\n\x0fJOB_APPLICATION\x10\x01\x12\x11\n\rRESUME_UPDATE\x10\x02\x12\x14\n\x10\x43\x41NDIDATE_UPDATE\x10\x03\x12\x15\n\x11\x43LIENT_SUBMISSION\x10\x04\x42y\n\x1f\x63om.google.cloud.talent.v4beta1B\x0b\x43ommonProtoP\x01ZAgoogle.golang.org/genproto/googleapis/cloud/talent/v4beta1;talent\xa2\x02\x03\x43TSb\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, 
google_dot_protobuf_dot_wrappers__pb2.DESCRIPTOR, google_dot_type_dot_date__pb2.DESCRIPTOR, @@ -85,8 +87,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=3636, - serialized_end=3757, + serialized_start=3689, + serialized_end=3810, ) _sym_db.RegisterEnumDescriptor(_COMPANYSIZE) @@ -148,8 +150,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=3760, - serialized_end=3986, + serialized_start=3813, + serialized_end=4039, ) _sym_db.RegisterEnumDescriptor(_JOBBENEFIT) @@ -226,8 +228,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=3989, - serialized_end=4259, + serialized_start=4042, + serialized_end=4312, ) _sym_db.RegisterEnumDescriptor(_DEGREETYPE) @@ -286,8 +288,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=4262, - serialized_end=4482, + serialized_start=4315, + serialized_end=4535, ) _sym_db.RegisterEnumDescriptor(_EMPLOYMENTTYPE) @@ -323,8 +325,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=4484, - serialized_end=4597, + serialized_start=4537, + serialized_end=4650, ) _sym_db.RegisterEnumDescriptor(_JOBLEVEL) @@ -527,8 +529,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=4600, - serialized_end=5426, + serialized_start=4653, + serialized_end=5479, ) _sym_db.RegisterEnumDescriptor(_JOBCATEGORY) @@ -562,8 +564,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=5428, - serialized_end=5529, + serialized_start=5481, + serialized_end=5582, ) _sym_db.RegisterEnumDescriptor(_POSTINGREGION) @@ -601,8 +603,8 @@ ], containing_type=None, serialized_options=_b("\030\001"), - serialized_start=5531, - serialized_end=5641, + serialized_start=5584, + serialized_end=5694, ) _sym_db.RegisterEnumDescriptor(_VISIBILITY) @@ -632,8 +634,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=5643, - serialized_end=5733, + serialized_start=5696, + serialized_end=5786, ) 
_sym_db.RegisterEnumDescriptor(_CONTACTINFOUSAGE) @@ -668,8 +670,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=5735, - serialized_end=5848, + serialized_start=5788, + serialized_end=5901, ) _sym_db.RegisterEnumDescriptor(_HTMLSANITIZATION) @@ -702,8 +704,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=5850, - serialized_end=5949, + serialized_start=5903, + serialized_end=6002, ) _sym_db.RegisterEnumDescriptor(_COMMUTEMETHOD) @@ -746,8 +748,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=5952, - serialized_end=6114, + serialized_start=6005, + serialized_end=6167, ) _sym_db.RegisterEnumDescriptor(_SKILLPROFICIENCYLEVEL) @@ -784,12 +786,58 @@ ], containing_type=None, serialized_options=None, - serialized_start=6116, - serialized_end=6218, + serialized_start=6169, + serialized_end=6271, ) _sym_db.RegisterEnumDescriptor(_OUTCOME) Outcome = enum_type_wrapper.EnumTypeWrapper(_OUTCOME) +_AVAILABILITYSIGNALTYPE = _descriptor.EnumDescriptor( + name="AvailabilitySignalType", + full_name="google.cloud.talent.v4beta1.AvailabilitySignalType", + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name="AVAILABILITY_SIGNAL_TYPE_UNSPECIFIED", + index=0, + number=0, + serialized_options=None, + type=None, + ), + _descriptor.EnumValueDescriptor( + name="JOB_APPLICATION", + index=1, + number=1, + serialized_options=None, + type=None, + ), + _descriptor.EnumValueDescriptor( + name="RESUME_UPDATE", index=2, number=2, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="CANDIDATE_UPDATE", + index=3, + number=3, + serialized_options=None, + type=None, + ), + _descriptor.EnumValueDescriptor( + name="CLIENT_SUBMISSION", + index=4, + number=4, + serialized_options=None, + type=None, + ), + ], + containing_type=None, + serialized_options=None, + serialized_start=6274, + serialized_end=6425, +) +_sym_db.RegisterEnumDescriptor(_AVAILABILITYSIGNALTYPE) + 
+AvailabilitySignalType = enum_type_wrapper.EnumTypeWrapper(_AVAILABILITYSIGNALTYPE) COMPANY_SIZE_UNSPECIFIED = 0 MINI = 1 SMALL = 2 @@ -899,6 +947,11 @@ NEUTRAL = 2 NEGATIVE = 3 OUTCOME_NOT_AVAILABLE = 4 +AVAILABILITY_SIGNAL_TYPE_UNSPECIFIED = 0 +JOB_APPLICATION = 1 +RESUME_UPDATE = 2 +CANDIDATE_UPDATE = 3 +CLIENT_SUBMISSION = 4 _LOCATION_LOCATIONTYPE = _descriptor.EnumDescriptor( @@ -959,8 +1012,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=625, - serialized_end=864, + serialized_start=658, + serialized_end=897, ) _sym_db.RegisterEnumDescriptor(_LOCATION_LOCATIONTYPE) @@ -998,8 +1051,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=1168, - serialized_end=1276, + serialized_start=1201, + serialized_end=1309, ) _sym_db.RegisterEnumDescriptor(_DEVICEINFO_DEVICETYPE) @@ -1047,8 +1100,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=2291, - serialized_end=2472, + serialized_start=2334, + serialized_end=2515, ) _sym_db.RegisterEnumDescriptor(_COMPENSATIONINFO_COMPENSATIONTYPE) @@ -1093,8 +1146,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=2475, - serialized_end=2631, + serialized_start=2518, + serialized_end=2674, ) _sym_db.RegisterEnumDescriptor(_COMPENSATIONINFO_COMPENSATIONUNIT) @@ -1132,8 +1185,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=3512, - serialized_end=3634, + serialized_start=3565, + serialized_end=3687, ) _sym_db.RegisterEnumDescriptor(_BATCHOPERATIONMETADATA_STATE) @@ -1190,8 +1243,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=312, - serialized_end=422, + serialized_start=345, + serialized_end=455, ) @@ -1283,8 +1336,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=425, - serialized_end=864, + serialized_start=458, + serialized_end=897, ) @@ -1394,8 +1447,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=867, - serialized_end=1026, + 
serialized_start=900, + serialized_end=1059, ) @@ -1433,8 +1486,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1028, - serialized_end=1066, + serialized_start=1061, + serialized_end=1099, ) @@ -1490,8 +1543,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1069, - serialized_end=1276, + serialized_start=1102, + serialized_end=1309, ) @@ -1565,8 +1618,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1278, - serialized_end=1359, + serialized_start=1311, + serialized_end=1392, ) @@ -1640,8 +1693,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1361, - serialized_end=1448, + serialized_start=1394, + serialized_end=1481, ) @@ -1777,8 +1830,8 @@ fields=[], ) ], - serialized_start=1773, - serialized_end=2175, + serialized_start=1816, + serialized_end=2218, ) _COMPENSATIONINFO_COMPENSATIONRANGE = _descriptor.Descriptor( @@ -1833,8 +1886,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2177, - serialized_end=2288, + serialized_start=2220, + serialized_end=2331, ) _COMPENSATIONINFO = _descriptor.Descriptor( @@ -1877,7 +1930,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1895,7 +1948,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -1910,8 +1963,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1451, - serialized_end=2631, + serialized_start=1484, + serialized_end=2674, ) @@ -2021,8 +2074,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2634, - serialized_end=2792, + serialized_start=2677, + serialized_end=2835, ) @@ -2120,7 +2173,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), 
file=DESCRIPTOR, ), ], @@ -2132,8 +2185,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2795, - serialized_end=2979, + serialized_start=2838, + serialized_end=3027, ) @@ -2177,7 +2230,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -2189,8 +2242,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2981, - serialized_end=3100, + serialized_start=3029, + serialized_end=3153, ) @@ -2282,8 +2335,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3102, - serialized_end=3171, + serialized_start=3155, + serialized_end=3224, ) @@ -2447,8 +2500,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3174, - serialized_end=3634, + serialized_start=3227, + serialized_end=3687, ) _TIMESTAMPRANGE.fields_by_name[ @@ -2569,6 +2622,7 @@ DESCRIPTOR.enum_types_by_name["CommuteMethod"] = _COMMUTEMETHOD DESCRIPTOR.enum_types_by_name["SkillProficiencyLevel"] = _SKILLPROFICIENCYLEVEL DESCRIPTOR.enum_types_by_name["Outcome"] = _OUTCOME +DESCRIPTOR.enum_types_by_name["AvailabilitySignalType"] = _AVAILABILITYSIGNALTYPE _sym_db.RegisterFileDescriptor(DESCRIPTOR) TimestampRange = _reflection.GeneratedProtocolMessageType( @@ -2597,8 +2651,7 @@ dict( DESCRIPTOR=_LOCATION, __module__="google.cloud.talent_v4beta1.proto.common_pb2", - __doc__="""Output only. A resource that represents a location with full geographic - information. + __doc__="""A resource that represents a location with full geographic information. Attributes: @@ -2637,9 +2690,7 @@ dict( DESCRIPTOR=_REQUESTMETADATA, __module__="google.cloud.talent_v4beta1.proto.common_pb2", - __doc__="""Input only. - - Meta information related to the job searcher or entity conducting the + __doc__="""Meta information related to the job searcher or entity conducting the job search. This information is used to improve the performance of the service. 
@@ -2679,20 +2730,19 @@ tenant site rely on this field being set correctly to a unique user ID. The maximum number of allowed characters is 255. allow_missing_ids: - Optional. If set to ``true``, + Only set when any of [domain][google.cloud.talent.v4beta1.RequestMetadata.domain], [session\_id][google.cloud.talent.v4beta1.RequestMetadata.sess ion\_id] and [user\_id][google.cloud.talent.v4beta1.RequestMet - adata.user\_id] are optional. Only set when any of these - fields isn't available for some reason. It is highly + adata.user\_id] isn't available for some reason. It is highly recommended not to set this field and provide accurate [domain][google.cloud.talent.v4beta1.RequestMetadata.domain], [session\_id][google.cloud.talent.v4beta1.RequestMetadata.sess ion\_id] and [user\_id][google.cloud.talent.v4beta1.RequestMet adata.user\_id] for the best service experience. device_info: - Optional. The type of device used by the job seeker at the - time of the call to the service. + The type of device used by the job seeker at the time of the + call to the service. """, # @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.RequestMetadata) ), @@ -2705,8 +2755,8 @@ dict( DESCRIPTOR=_RESPONSEMETADATA, __module__="google.cloud.talent_v4beta1.proto.common_pb2", - __doc__="""Output only. Additional information returned to client, such as - debugging information. + __doc__="""Additional information returned to client, such as debugging + information. Attributes: @@ -2732,10 +2782,10 @@ Attributes: device_type: - Optional. Type of the device. + Type of the device. id: - Optional. A device-specific ID. The ID must be a unique - identifier that distinguishes the device from other devices. + A device-specific ID. The ID must be a unique identifier that + distinguishes the device from other devices. 
""", # @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.DeviceInfo) ), @@ -2753,10 +2803,10 @@ Attributes: string_values: - Optional but exactly one of [string\_values][google.cloud.tale - nt.v4beta1.CustomAttribute.string\_values] or [long\_values][g - oogle.cloud.talent.v4beta1.CustomAttribute.long\_values] must - be specified. This field is used to perform a string match + Exactly one of [string\_values][google.cloud.talent.v4beta1.Cu + stomAttribute.string\_values] or [long\_values][google.cloud.t + alent.v4beta1.CustomAttribute.long\_values] must be specified. + This field is used to perform a string match (``CASE_SENSITIVE_MATCH`` or ``CASE_INSENSITIVE_MATCH``) search. For filterable ``string_value``\ s, a maximum total number of 200 values is allowed, with each ``string_value`` @@ -2764,18 +2814,17 @@ ``string_values``, the maximum total byte size of unfilterable ``string_values`` is 50KB. Empty string isn't allowed. long_values: - Optional but exactly one of [string\_values][google.cloud.tale - nt.v4beta1.CustomAttribute.string\_values] or [long\_values][g - oogle.cloud.talent.v4beta1.CustomAttribute.long\_values] must - be specified. This field is used to perform number range - search. (``EQ``, ``GT``, ``GE``, ``LE``, ``LT``) over - filterable ``long_value``. Currently at most 1 [long\_values] - [google.cloud.talent.v4beta1.CustomAttribute.long\_values] is - supported. + Exactly one of [string\_values][google.cloud.talent.v4beta1.Cu + stomAttribute.string\_values] or [long\_values][google.cloud.t + alent.v4beta1.CustomAttribute.long\_values] must be specified. + This field is used to perform number range search. (``EQ``, + ``GT``, ``GE``, ``LE``, ``LT``) over filterable + ``long_value``. Currently at most 1 [long\_values][google.clo + ud.talent.v4beta1.CustomAttribute.long\_values] is supported. filterable: - Optional. If the ``filterable`` flag is true, custom field - values are searchable. If false, values are not searchable. 
- Default is false. + If the ``filterable`` flag is true, custom field values are + searchable. If false, values are not searchable. Default is + false. """, # @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.CustomAttribute) ), @@ -2788,7 +2837,7 @@ dict( DESCRIPTOR=_SPELLINGCORRECTION, __module__="google.cloud.talent_v4beta1.proto.common_pb2", - __doc__="""Output only. Spell check result. + __doc__="""Spell check result. Attributes: @@ -2839,33 +2888,31 @@ Attributes: type: - Optional. Compensation type. Default is [CompensationType.COM - PENSATION\_TYPE\_UNSPECIFIED][google.cloud.talent.v4beta1.Comp - ensationInfo.CompensationType.COMPENSATION\_TYPE\_UNSPECIFIED] - . + Compensation type. Default is [CompensationType.COMPENSATION\ + _TYPE\_UNSPECIFIED][google.cloud.talent.v4beta1.CompensationIn + fo.CompensationType.COMPENSATION\_TYPE\_UNSPECIFIED]. unit: - Optional. Frequency of the specified amount. Default is [Comp - ensationUnit.COMPENSATION\_UNIT\_UNSPECIFIED][google.cloud.tal - ent.v4beta1.CompensationInfo.CompensationUnit.COMPENSATION\_UN - IT\_UNSPECIFIED]. + Frequency of the specified amount. Default is [CompensationUn + it.COMPENSATION\_UNIT\_UNSPECIFIED][google.cloud.talent.v4beta + 1.CompensationInfo.CompensationUnit.COMPENSATION\_UNIT\_UNSPEC + IFIED]. compensation_amount: - Optional. Compensation amount. It could be a fixed amount or - a floating range. + Compensation amount. It could be a fixed amount or a floating + range. amount: - Optional. Compensation amount. + Compensation amount. range: - Optional. Compensation range. + Compensation range. description: - Optional. Compensation description. For example, could - indicate equity terms or provide additional context to an - estimated bonus. + Compensation description. For example, could indicate equity + terms or provide additional context to an estimated bonus. expected_units_per_year: - Optional. Expected number of units paid each year. 
If not - specified, when [Job.employment\_types][google.cloud.talent.v4 - beta1.Job.employment\_types] is FULLTIME, a default value is - inferred based on [unit][google.cloud.talent.v4beta1.Compensat - ionInfo.CompensationEntry.unit]. Default values: - HOURLY: - 2080 - DAILY: 260 - WEEKLY: 52 - MONTHLY: 12 - ANNUAL: 1 + Expected number of units paid each year. If not specified, + when [Job.employment\_types][google.cloud.talent.v4beta1.Job.e + mployment\_types] is FULLTIME, a default value is inferred + based on [unit][google.cloud.talent.v4beta1.CompensationInfo.C + ompensationEntry.unit]. Default values: - HOURLY: 2080 - + DAILY: 260 - WEEKLY: 52 - MONTHLY: 12 - ANNUAL: 1 """, # @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.CompensationInfo.CompensationEntry) ), @@ -2881,14 +2928,14 @@ Attributes: max_compensation: - Optional. The maximum amount of compensation. If left empty, - the value is set to a maximal compensation value and the - currency code is set to match the [currency + The maximum amount of compensation. If left empty, the value + is set to a maximal compensation value and the currency code + is set to match the [currency code][google.type.Money.currency\_code] of min\_compensation. min_compensation: - Optional. The minimum amount of compensation. If left empty, - the value is set to zero and the currency code is set to match - the [currency code][google.type.Money.currency\_code] of + The minimum amount of compensation. If left empty, the value + is set to zero and the currency code is set to match the + [currency code][google.type.Money.currency\_code] of max\_compensation. """, # @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.CompensationInfo.CompensationRange) @@ -2901,10 +2948,10 @@ Attributes: entries: - Optional. Job compensation information. 
At most one entry can - be of type [CompensationInfo.CompensationType.BASE][google.clo - ud.talent.v4beta1.CompensationInfo.CompensationType.BASE], - which is referred as **base compensation entry** for the job. + Job compensation information. At most one entry can be of + type [CompensationInfo.CompensationType.BASE][google.cloud.tal + ent.v4beta1.CompensationInfo.CompensationType.BASE], which is + referred as **base compensation entry** for the job. annualized_base_compensation_range: Output only. Annualized base compensation range. Computed as base compensation entry's [CompensationEntry.amount][google.cl @@ -2942,19 +2989,19 @@ Attributes: display_name: - Optional. Name of license or certification. Number of - characters allowed is 100. + Name of license or certification. Number of characters + allowed is 100. acquire_date: - Optional. Acquisition date or effective date of license or + Acquisition date or effective date of license or certification. expire_date: - Optional. Expiration date of license of certification. + Expiration date of license of certification. authority: - Optional. Authority of license, such as government. Number of + Authority of license, such as government. Number of characters allowed is 100. description: - Optional. Description of license or certification. Number of - characters allowed is 100,000. + Description of license or certification. Number of characters + allowed is 100,000. """, # @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.Certification) ), @@ -2972,16 +3019,16 @@ Attributes: display_name: - Optional. Skill display name. For example, "Java", "Python". - Number of characters allowed is 100. + Skill display name. For example, "Java", "Python". Number of + characters allowed is 100. last_used_date: - Optional. The last time this skill was used. + The last time this skill was used. level: - Optional. Skill proficiency level which indicates how - proficient the candidate is at this skill. 
+ Skill proficiency level which indicates how proficient the + candidate is at this skill. context: - Optional. A paragraph describes context of this skill. Number - of characters allowed is 100,000. + A paragraph describes context of this skill. Number of + characters allowed is 100,000. skill_name_snippet: Output only. Skill name snippet shows how the [display\_name][ google.cloud.talent.v4beta1.Skill.display\_name] is related to @@ -3005,7 +3052,7 @@ Attributes: rating: - Optional. The rating on this interview. + The rating on this interview. outcome: Required. The overall decision resulting from this interview (positive, negative, nuetral). @@ -3046,9 +3093,7 @@ dict( DESCRIPTOR=_BATCHOPERATIONMETADATA, __module__="google.cloud.talent_v4beta1.proto.common_pb2", - __doc__="""Output only. - - Metadata used for long running operations returned by CTS batch APIs. + __doc__="""Metadata used for long running operations returned by CTS batch APIs. It's used to replace [google.longrunning.Operation.metadata][google.longrunning.Operation.metadata]. 
@@ -3084,4 +3129,8 @@ DESCRIPTOR._options = None _VISIBILITY._options = None +_COMPENSATIONINFO.fields_by_name["annualized_base_compensation_range"]._options = None +_COMPENSATIONINFO.fields_by_name["annualized_total_compensation_range"]._options = None +_SKILL.fields_by_name["skill_name_snippet"]._options = None +_INTERVIEW.fields_by_name["outcome"]._options = None # @@protoc_insertion_point(module_scope) diff --git a/talent/google/cloud/talent_v4beta1/proto/company.proto b/talent/google/cloud/talent_v4beta1/proto/company.proto index 16053709db0d..d6543d0e688d 100644 --- a/talent/google/cloud/talent_v4beta1/proto/company.proto +++ b/talent/google/cloud/talent_v4beta1/proto/company.proto @@ -18,6 +18,7 @@ syntax = "proto3"; package google.cloud.talent.v4beta1; import "google/api/annotations.proto"; +import "google/api/field_behavior.proto"; import "google/cloud/talent/v4beta1/common.proto"; option go_package = "google.golang.org/genproto/googleapis/cloud/talent/v4beta1;talent"; @@ -45,58 +46,58 @@ message Company { // // The format is // "projects/{project_id}/tenants/{tenant_id}/companies/{company_id}", for - // example, "projects/api-test-project/tenants/foo/companies/bar". + // example, "projects/foo/tenants/bar/companies/baz". // - // Tenant id is optional and the default tenant is used if unspecified, for - // example, "projects/api-test-project/companies/bar". + // If tenant id is unspecified, the default tenant is used. For + // example, "projects/foo/companies/bar". string name = 1; // Required. The display name of the company, for example, "Google LLC". - string display_name = 2; + string display_name = 2 [(google.api.field_behavior) = REQUIRED]; // Required. Client side company identifier, used to uniquely identify the // company. // // The maximum number of allowed characters is 255. - string external_id = 3; + string external_id = 3 [(google.api.field_behavior) = REQUIRED]; - // Optional. The employer's company size. + // The employer's company size. 
CompanySize size = 4; - // Optional. The street address of the company's main headquarters, which may - // be different from the job location. The service attempts to geolocate the - // provided address, and populates a more specific location wherever possible - // in + // The street address of the company's main headquarters, which may be + // different from the job location. The service attempts + // to geolocate the provided address, and populates a more specific + // location wherever possible in // [DerivedInfo.headquarters_location][google.cloud.talent.v4beta1.Company.DerivedInfo.headquarters_location]. string headquarters_address = 5; - // Optional. Set to true if it is the hiring agency that post jobs for other + // Set to true if it is the hiring agency that post jobs for other // employers. // // Defaults to false if not provided. bool hiring_agency = 6; - // Optional. Equal Employment Opportunity legal disclaimer text to be + // Equal Employment Opportunity legal disclaimer text to be // associated with all jobs, and typically to be displayed in all // roles. // // The maximum number of allowed characters is 500. string eeo_text = 7; - // Optional. The URI representing the company's primary web site or home page, + // The URI representing the company's primary web site or home page, // for example, "https://www.google.com". // // The maximum number of allowed characters is 255. string website_uri = 8; - // Optional. The URI to employer's career site or careers page on the - // employer's web site, for example, "https://careers.google.com". + // The URI to employer's career site or careers page on the employer's web + // site, for example, "https://careers.google.com". string career_site_uri = 9; - // Optional. A URI that hosts the employer's company logo. + // A URI that hosts the employer's company logo. string image_uri = 10; - // Optional. 
A list of keys of filterable + // A list of keys of filterable // [Job.custom_attributes][google.cloud.talent.v4beta1.Job.custom_attributes], // whose corresponding `string_values` are used in keyword searches. Jobs with // `string_values` under these specified field keys are returned if any @@ -106,10 +107,10 @@ message Company { repeated string keyword_searchable_job_custom_attributes = 11; // Output only. Derived details about the company. - DerivedInfo derived_info = 12; + DerivedInfo derived_info = 12 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Indicates whether a company is flagged to be suspended from // public availability by the service when job content appears suspicious, // abusive, or spammy. - bool suspended = 13; + bool suspended = 13 [(google.api.field_behavior) = OUTPUT_ONLY]; } diff --git a/talent/google/cloud/talent_v4beta1/proto/company_pb2.py b/talent/google/cloud/talent_v4beta1/proto/company_pb2.py index 1095015570b1..07d9bfd43da0 100644 --- a/talent/google/cloud/talent_v4beta1/proto/company_pb2.py +++ b/talent/google/cloud/talent_v4beta1/proto/company_pb2.py @@ -16,6 +16,7 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.cloud.talent_v4beta1.proto import ( common_pb2 as google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_common__pb2, ) @@ -29,10 +30,11 @@ "\n\037com.google.cloud.talent.v4beta1B\024CompanyResourceProtoP\001ZAgoogle.golang.org/genproto/googleapis/cloud/talent/v4beta1;talent\242\002\003CTS" ), serialized_pb=_b( - '\n/google/cloud/talent_v4beta1/proto/company.proto\x12\x1bgoogle.cloud.talent.v4beta1\x1a\x1cgoogle/api/annotations.proto\x1a.google/cloud/talent_v4beta1/proto/common.proto"\xe4\x03\n\x07\x43ompany\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12\x13\n\x0b\x65xternal_id\x18\x03 \x01(\t\x12\x36\n\x04size\x18\x04 
\x01(\x0e\x32(.google.cloud.talent.v4beta1.CompanySize\x12\x1c\n\x14headquarters_address\x18\x05 \x01(\t\x12\x15\n\rhiring_agency\x18\x06 \x01(\x08\x12\x10\n\x08\x65\x65o_text\x18\x07 \x01(\t\x12\x13\n\x0bwebsite_uri\x18\x08 \x01(\t\x12\x17\n\x0f\x63\x61reer_site_uri\x18\t \x01(\t\x12\x11\n\timage_uri\x18\n \x01(\t\x12\x30\n(keyword_searchable_job_custom_attributes\x18\x0b \x03(\t\x12\x46\n\x0c\x64\x65rived_info\x18\x0c \x01(\x0b\x32\x30.google.cloud.talent.v4beta1.Company.DerivedInfo\x12\x11\n\tsuspended\x18\r \x01(\x08\x1aS\n\x0b\x44\x65rivedInfo\x12\x44\n\x15headquarters_location\x18\x01 \x01(\x0b\x32%.google.cloud.talent.v4beta1.LocationB\x82\x01\n\x1f\x63om.google.cloud.talent.v4beta1B\x14\x43ompanyResourceProtoP\x01ZAgoogle.golang.org/genproto/googleapis/cloud/talent/v4beta1;talent\xa2\x02\x03\x43TSb\x06proto3' + '\n/google/cloud/talent_v4beta1/proto/company.proto\x12\x1bgoogle.cloud.talent.v4beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a.google/cloud/talent_v4beta1/proto/common.proto"\xf8\x03\n\x07\x43ompany\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x19\n\x0c\x64isplay_name\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x18\n\x0b\x65xternal_id\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x36\n\x04size\x18\x04 \x01(\x0e\x32(.google.cloud.talent.v4beta1.CompanySize\x12\x1c\n\x14headquarters_address\x18\x05 \x01(\t\x12\x15\n\rhiring_agency\x18\x06 \x01(\x08\x12\x10\n\x08\x65\x65o_text\x18\x07 \x01(\t\x12\x13\n\x0bwebsite_uri\x18\x08 \x01(\t\x12\x17\n\x0f\x63\x61reer_site_uri\x18\t \x01(\t\x12\x11\n\timage_uri\x18\n \x01(\t\x12\x30\n(keyword_searchable_job_custom_attributes\x18\x0b \x03(\t\x12K\n\x0c\x64\x65rived_info\x18\x0c \x01(\x0b\x32\x30.google.cloud.talent.v4beta1.Company.DerivedInfoB\x03\xe0\x41\x03\x12\x16\n\tsuspended\x18\r \x01(\x08\x42\x03\xe0\x41\x03\x1aS\n\x0b\x44\x65rivedInfo\x12\x44\n\x15headquarters_location\x18\x01 
\x01(\x0b\x32%.google.cloud.talent.v4beta1.LocationB\x82\x01\n\x1f\x63om.google.cloud.talent.v4beta1B\x14\x43ompanyResourceProtoP\x01ZAgoogle.golang.org/genproto/googleapis/cloud/talent/v4beta1;talent\xa2\x02\x03\x43TSb\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_common__pb2.DESCRIPTOR, ], ) @@ -72,8 +74,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=560, - serialized_end=643, + serialized_start=613, + serialized_end=696, ) _COMPANY = _descriptor.Descriptor( @@ -116,7 +118,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -134,7 +136,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -296,7 +298,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -314,7 +316,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -326,8 +328,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=159, - serialized_end=643, + serialized_start=192, + serialized_end=696, ) _COMPANY_DERIVEDINFO.fields_by_name[ @@ -379,10 +381,10 @@ Required during company update. The resource name for a company. This is generated by the service when a company is created. The format is "projects/{project\_id}/tenants/{tenan - t\_id}/companies/{company\_id}", for example, "projects/api- - test-project/tenants/foo/companies/bar". 
Tenant id is - optional and the default tenant is used if unspecified, for - example, "projects/api-test-project/companies/bar". + t\_id}/companies/{company\_id}", for example, + "projects/foo/tenants/bar/companies/baz". If tenant id is + unspecified, the default tenant is used. For example, + "projects/foo/companies/bar". display_name: Required. The display name of the company, for example, "Google LLC". @@ -391,35 +393,34 @@ identify the company. The maximum number of allowed characters is 255. size: - Optional. The employer's company size. + The employer's company size. headquarters_address: - Optional. The street address of the company's main - headquarters, which may be different from the job location. - The service attempts to geolocate the provided address, and - populates a more specific location wherever possible in [Deriv - edInfo.headquarters\_location][google.cloud.talent.v4beta1.Com - pany.DerivedInfo.headquarters\_location]. + The street address of the company's main headquarters, which + may be different from the job location. The service attempts + to geolocate the provided address, and populates a more + specific location wherever possible in [DerivedInfo.headquarte + rs\_location][google.cloud.talent.v4beta1.Company.DerivedInfo. + headquarters\_location]. hiring_agency: - Optional. Set to true if it is the hiring agency that post - jobs for other employers. Defaults to false if not provided. + Set to true if it is the hiring agency that post jobs for + other employers. Defaults to false if not provided. eeo_text: - Optional. Equal Employment Opportunity legal disclaimer text - to be associated with all jobs, and typically to be displayed - in all roles. The maximum number of allowed characters is - 500. + Equal Employment Opportunity legal disclaimer text to be + associated with all jobs, and typically to be displayed in all + roles. The maximum number of allowed characters is 500. website_uri: - Optional. 
The URI representing the company's primary web site - or home page, for example, "https://www.google.com". The - maximum number of allowed characters is 255. + The URI representing the company's primary web site or home + page, for example, "https://www.google.com". The maximum + number of allowed characters is 255. career_site_uri: - Optional. The URI to employer's career site or careers page on - the employer's web site, for example, + The URI to employer's career site or careers page on the + employer's web site, for example, "https://careers.google.com". image_uri: - Optional. A URI that hosts the employer's company logo. + A URI that hosts the employer's company logo. keyword_searchable_job_custom_attributes: - Optional. A list of keys of filterable [Job.custom\_attributes - ][google.cloud.talent.v4beta1.Job.custom\_attributes], whose + A list of keys of filterable [Job.custom\_attributes][google.c + loud.talent.v4beta1.Job.custom\_attributes], whose corresponding ``string_values`` are used in keyword searches. Jobs with ``string_values`` under these specified field keys are returned if any of the values match the search keyword. 
@@ -441,4 +442,8 @@ DESCRIPTOR._options = None +_COMPANY.fields_by_name["display_name"]._options = None +_COMPANY.fields_by_name["external_id"]._options = None +_COMPANY.fields_by_name["derived_info"]._options = None +_COMPANY.fields_by_name["suspended"]._options = None # @@protoc_insertion_point(module_scope) diff --git a/talent/google/cloud/talent_v4beta1/proto/company_service.proto b/talent/google/cloud/talent_v4beta1/proto/company_service.proto index 1e48a46cd5b3..b3a091b24ced 100644 --- a/talent/google/cloud/talent_v4beta1/proto/company_service.proto +++ b/talent/google/cloud/talent_v4beta1/proto/company_service.proto @@ -19,6 +19,7 @@ package google.cloud.talent.v4beta1; import "google/api/annotations.proto"; import "google/api/client.proto"; +import "google/api/field_behavior.proto"; import "google/cloud/talent/v4beta1/common.proto"; import "google/cloud/talent/v4beta1/company.proto"; import "google/protobuf/empty.proto"; @@ -92,14 +93,12 @@ message CreateCompanyRequest { // Required. Resource name of the tenant under which the company is created. // // The format is "projects/{project_id}/tenants/{tenant_id}", for example, - // "projects/api-test-project/tenant/foo". - // - // Tenant id is optional and a default tenant is created if unspecified, for - // example, "projects/api-test-project". - string parent = 1; + // "projects/foo/tenant/bar". If tenant id is unspecified, a default tenant + // is created, for example, "projects/foo". + string parent = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The company to be created. - Company company = 2; + Company company = 2 [(google.api.field_behavior) = REQUIRED]; } // Request for getting a company by name. @@ -110,19 +109,18 @@ message GetCompanyRequest { // "projects/{project_id}/tenants/{tenant_id}/companies/{company_id}", for // example, "projects/api-test-project/tenants/foo/companies/bar". 
// - // Tenant id is optional and the default tenant is used if unspecified, for + // If tenant id is unspecified, the default tenant is used, for // example, "projects/api-test-project/companies/bar". - string name = 1; + string name = 1 [(google.api.field_behavior) = REQUIRED]; } // Request for updating a specified company. message UpdateCompanyRequest { // Required. The company resource to replace the current resource in the // system. - Company company = 1; + Company company = 1 [(google.api.field_behavior) = REQUIRED]; - // Optional but strongly recommended for the best service - // experience. + // Strongly recommended for the best service experience. // // If // [update_mask][google.cloud.talent.v4beta1.UpdateCompanyRequest.update_mask] @@ -142,11 +140,11 @@ message DeleteCompanyRequest { // // The format is // "projects/{project_id}/tenants/{tenant_id}/companies/{company_id}", for - // example, "projects/api-test-project/tenants/foo/companies/bar". + // example, "projects/foo/tenants/bar/companies/baz". // - // Tenant id is optional and the default tenant is used if unspecified, for - // example, "projects/api-test-project/companies/bar". - string name = 1; + // If tenant id is unspecified, the default tenant is used, for + // example, "projects/foo/companies/bar". + string name = 1 [(google.api.field_behavior) = REQUIRED]; } // List companies for which the client has ACL visibility. @@ -154,20 +152,20 @@ message ListCompaniesRequest { // Required. Resource name of the tenant under which the company is created. // // The format is "projects/{project_id}/tenants/{tenant_id}", for example, - // "projects/api-test-project/tenant/foo". + // "projects/foo/tenant/bar". // - // Tenant id is optional and the default tenant is used if unspecified, for - // example, "projects/api-test-project". - string parent = 1; + // If tenant id is unspecified, the default tenant will be used, for + // example, "projects/foo". 
+ string parent = 1 [(google.api.field_behavior) = REQUIRED]; - // Optional. The starting indicator from which to return results. + // The starting indicator from which to return results. string page_token = 2; - // Optional. The maximum number of companies to be returned, at most 100. + // The maximum number of companies to be returned, at most 100. // Default is 100 if a non-positive number is provided. int32 page_size = 3; - // Optional. Set to true if the companies requested must have open jobs. + // Set to true if the companies requested must have open jobs. // // Defaults to false. // @@ -177,8 +175,6 @@ message ListCompaniesRequest { bool require_open_jobs = 4; } -// Output only. -// // The List companies response object. message ListCompaniesResponse { // Companies for the current client. diff --git a/talent/google/cloud/talent_v4beta1/proto/company_service_pb2.py b/talent/google/cloud/talent_v4beta1/proto/company_service_pb2.py index a1247cbbe1ef..5068e5d12595 100644 --- a/talent/google/cloud/talent_v4beta1/proto/company_service_pb2.py +++ b/talent/google/cloud/talent_v4beta1/proto/company_service_pb2.py @@ -17,6 +17,7 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.cloud.talent_v4beta1.proto import ( common_pb2 as google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_common__pb2, ) @@ -35,11 +36,12 @@ "\n\037com.google.cloud.talent.v4beta1B\023CompanyServiceProtoP\001ZAgoogle.golang.org/genproto/googleapis/cloud/talent/v4beta1;talent\242\002\003CTS" ), serialized_pb=_b( - 
'\n7google/cloud/talent_v4beta1/proto/company_service.proto\x12\x1bgoogle.cloud.talent.v4beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a.google/cloud/talent_v4beta1/proto/common.proto\x1a/google/cloud/talent_v4beta1/proto/company.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto"]\n\x14\x43reateCompanyRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x35\n\x07\x63ompany\x18\x02 \x01(\x0b\x32$.google.cloud.talent.v4beta1.Company"!\n\x11GetCompanyRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"~\n\x14UpdateCompanyRequest\x12\x35\n\x07\x63ompany\x18\x01 \x01(\x0b\x32$.google.cloud.talent.v4beta1.Company\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"$\n\x14\x44\x65leteCompanyRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"h\n\x14ListCompaniesRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x19\n\x11require_open_jobs\x18\x04 \x01(\x08"\xaa\x01\n\x15ListCompaniesResponse\x12\x37\n\tcompanies\x18\x01 \x03(\x0b\x32$.google.cloud.talent.v4beta1.Company\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\x12?\n\x08metadata\x18\x03 
\x01(\x0b\x32-.google.cloud.talent.v4beta1.ResponseMetadata2\x9f\t\n\x0e\x43ompanyService\x12\xd2\x01\n\rCreateCompany\x12\x31.google.cloud.talent.v4beta1.CreateCompanyRequest\x1a$.google.cloud.talent.v4beta1.Company"h\x82\xd3\xe4\x93\x02\x62"0/v4beta1/{parent=projects/*/tenants/*}/companies:\x01*Z+"&/v4beta1/{parent=projects/*}/companies:\x01*\x12\xc6\x01\n\nGetCompany\x12..google.cloud.talent.v4beta1.GetCompanyRequest\x1a$.google.cloud.talent.v4beta1.Company"b\x82\xd3\xe4\x93\x02\\\x12\x30/v4beta1/{name=projects/*/tenants/*/companies/*}Z(\x12&/v4beta1/{name=projects/*/companies/*}\x12\xe2\x01\n\rUpdateCompany\x12\x31.google.cloud.talent.v4beta1.UpdateCompanyRequest\x1a$.google.cloud.talent.v4beta1.Company"x\x82\xd3\xe4\x93\x02r28/v4beta1/{company.name=projects/*/tenants/*/companies/*}:\x01*Z32./v4beta1/{company.name=projects/*/companies/*}:\x01*\x12\xbe\x01\n\rDeleteCompany\x12\x31.google.cloud.talent.v4beta1.DeleteCompanyRequest\x1a\x16.google.protobuf.Empty"b\x82\xd3\xe4\x93\x02\\*0/v4beta1/{name=projects/*/tenants/*/companies/*}Z(*&/v4beta1/{name=projects/*/companies/*}\x12\xda\x01\n\rListCompanies\x12\x31.google.cloud.talent.v4beta1.ListCompaniesRequest\x1a\x32.google.cloud.talent.v4beta1.ListCompaniesResponse"b\x82\xd3\xe4\x93\x02\\\x12\x30/v4beta1/{parent=projects/*/tenants/*}/companiesZ(\x12&/v4beta1/{parent=projects/*}/companies\x1al\xca\x41\x13jobs.googleapis.com\xd2\x41Shttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/jobsB\x81\x01\n\x1f\x63om.google.cloud.talent.v4beta1B\x13\x43ompanyServiceProtoP\x01ZAgoogle.golang.org/genproto/googleapis/cloud/talent/v4beta1;talent\xa2\x02\x03\x43TSb\x06proto3' + 
'\n7google/cloud/talent_v4beta1/proto/company_service.proto\x12\x1bgoogle.cloud.talent.v4beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a.google/cloud/talent_v4beta1/proto/common.proto\x1a/google/cloud/talent_v4beta1/proto/company.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto"g\n\x14\x43reateCompanyRequest\x12\x13\n\x06parent\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12:\n\x07\x63ompany\x18\x02 \x01(\x0b\x32$.google.cloud.talent.v4beta1.CompanyB\x03\xe0\x41\x02"&\n\x11GetCompanyRequest\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02"\x83\x01\n\x14UpdateCompanyRequest\x12:\n\x07\x63ompany\x18\x01 \x01(\x0b\x32$.google.cloud.talent.v4beta1.CompanyB\x03\xe0\x41\x02\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask")\n\x14\x44\x65leteCompanyRequest\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02"m\n\x14ListCompaniesRequest\x12\x13\n\x06parent\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x19\n\x11require_open_jobs\x18\x04 \x01(\x08"\xaa\x01\n\x15ListCompaniesResponse\x12\x37\n\tcompanies\x18\x01 \x03(\x0b\x32$.google.cloud.talent.v4beta1.Company\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\x12?\n\x08metadata\x18\x03 
\x01(\x0b\x32-.google.cloud.talent.v4beta1.ResponseMetadata2\x9f\t\n\x0e\x43ompanyService\x12\xd2\x01\n\rCreateCompany\x12\x31.google.cloud.talent.v4beta1.CreateCompanyRequest\x1a$.google.cloud.talent.v4beta1.Company"h\x82\xd3\xe4\x93\x02\x62"0/v4beta1/{parent=projects/*/tenants/*}/companies:\x01*Z+"&/v4beta1/{parent=projects/*}/companies:\x01*\x12\xc6\x01\n\nGetCompany\x12..google.cloud.talent.v4beta1.GetCompanyRequest\x1a$.google.cloud.talent.v4beta1.Company"b\x82\xd3\xe4\x93\x02\\\x12\x30/v4beta1/{name=projects/*/tenants/*/companies/*}Z(\x12&/v4beta1/{name=projects/*/companies/*}\x12\xe2\x01\n\rUpdateCompany\x12\x31.google.cloud.talent.v4beta1.UpdateCompanyRequest\x1a$.google.cloud.talent.v4beta1.Company"x\x82\xd3\xe4\x93\x02r28/v4beta1/{company.name=projects/*/tenants/*/companies/*}:\x01*Z32./v4beta1/{company.name=projects/*/companies/*}:\x01*\x12\xbe\x01\n\rDeleteCompany\x12\x31.google.cloud.talent.v4beta1.DeleteCompanyRequest\x1a\x16.google.protobuf.Empty"b\x82\xd3\xe4\x93\x02\\*0/v4beta1/{name=projects/*/tenants/*/companies/*}Z(*&/v4beta1/{name=projects/*/companies/*}\x12\xda\x01\n\rListCompanies\x12\x31.google.cloud.talent.v4beta1.ListCompaniesRequest\x1a\x32.google.cloud.talent.v4beta1.ListCompaniesResponse"b\x82\xd3\xe4\x93\x02\\\x12\x30/v4beta1/{parent=projects/*/tenants/*}/companiesZ(\x12&/v4beta1/{parent=projects/*}/companies\x1al\xca\x41\x13jobs.googleapis.com\xd2\x41Shttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/jobsB\x81\x01\n\x1f\x63om.google.cloud.talent.v4beta1B\x13\x43ompanyServiceProtoP\x01ZAgoogle.golang.org/genproto/googleapis/cloud/talent/v4beta1;talent\xa2\x02\x03\x43TSb\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_common__pb2.DESCRIPTOR, google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_company__pb2.DESCRIPTOR, 
google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, @@ -70,7 +72,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -88,7 +90,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -100,8 +102,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=303, - serialized_end=396, + serialized_start=336, + serialized_end=439, ) @@ -127,7 +129,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ) ], @@ -139,8 +141,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=398, - serialized_end=431, + serialized_start=441, + serialized_end=479, ) @@ -166,7 +168,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -196,8 +198,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=433, - serialized_end=559, + serialized_start=482, + serialized_end=613, ) @@ -223,7 +225,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ) ], @@ -235,8 +237,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=561, - serialized_end=597, + serialized_start=615, + serialized_end=656, ) @@ -262,7 +264,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -328,8 +330,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=599, - serialized_end=703, + serialized_start=658, + serialized_end=767, ) @@ -403,8 +405,8 @@ 
syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=706, - serialized_end=876, + serialized_start=770, + serialized_end=940, ) _CREATECOMPANYREQUEST.fields_by_name[ @@ -452,9 +454,8 @@ Required. Resource name of the tenant under which the company is created. The format is "projects/{project\_id}/tenants/{tenant\_id}", for example, - "projects/api-test-project/tenant/foo". Tenant id is optional - and a default tenant is created if unspecified, for example, - "projects/api-test-project". + "projects/foo/tenant/bar". If tenant id is unspecified, a + default tenant is created, for example, "projects/foo". company: Required. The company to be created. """, @@ -477,8 +478,8 @@ Required. The resource name of the company to be retrieved. The format is "projects/{project\_id}/tenants/{tenant\_id}/com panies/{company\_id}", for example, "projects/api-test- - project/tenants/foo/companies/bar". Tenant id is optional and - the default tenant is used if unspecified, for example, + project/tenants/foo/companies/bar". If tenant id is + unspecified, the default tenant is used, for example, "projects/api-test-project/companies/bar". """, # @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.GetCompanyRequest) @@ -500,14 +501,14 @@ Required. The company resource to replace the current resource in the system. update_mask: - Optional but strongly recommended for the best service - experience. If [update\_mask][google.cloud.talent.v4beta1.Upd - ateCompanyRequest.update\_mask] is provided, only the - specified fields in [company][google.cloud.talent.v4beta1.Upda - teCompanyRequest.company] are updated. Otherwise all the - fields are updated. A field mask to specify the company - fields to be updated. Only top level fields of - [Company][google.cloud.talent.v4beta1.Company] are supported. + Strongly recommended for the best service experience. 
If [upd + ate\_mask][google.cloud.talent.v4beta1.UpdateCompanyRequest.up + date\_mask] is provided, only the specified fields in [company + ][google.cloud.talent.v4beta1.UpdateCompanyRequest.company] + are updated. Otherwise all the fields are updated. A field + mask to specify the company fields to be updated. Only top + level fields of [Company][google.cloud.talent.v4beta1.Company] + are supported. """, # @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.UpdateCompanyRequest) ), @@ -527,10 +528,10 @@ name: Required. The resource name of the company to be deleted. The format is "projects/{project\_id}/tenants/{tenant\_id}/compani - es/{company\_id}", for example, "projects/api-test- - project/tenants/foo/companies/bar". Tenant id is optional and - the default tenant is used if unspecified, for example, - "projects/api-test-project/companies/bar". + es/{company\_id}", for example, + "projects/foo/tenants/bar/companies/baz". If tenant id is + unspecified, the default tenant is used, for example, + "projects/foo/companies/bar". """, # @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.DeleteCompanyRequest) ), @@ -551,20 +552,19 @@ Required. Resource name of the tenant under which the company is created. The format is "projects/{project\_id}/tenants/{tenant\_id}", for example, - "projects/api-test-project/tenant/foo". Tenant id is optional - and the default tenant is used if unspecified, for example, - "projects/api-test-project". + "projects/foo/tenant/bar". If tenant id is unspecified, the + default tenant will be used, for example, "projects/foo". page_token: - Optional. The starting indicator from which to return results. + The starting indicator from which to return results. page_size: - Optional. The maximum number of companies to be returned, at - most 100. Default is 100 if a non-positive number is provided. + The maximum number of companies to be returned, at most 100. + Default is 100 if a non-positive number is provided. 
require_open_jobs: - Optional. Set to true if the companies requested must have - open jobs. Defaults to false. If true, at most [page\_size][ - google.cloud.talent.v4beta1.ListCompaniesRequest.page\_size] - of companies are fetched, among which only those with open - jobs are returned. + Set to true if the companies requested must have open jobs. + Defaults to false. If true, at most [page\_size][google.cloud + .talent.v4beta1.ListCompaniesRequest.page\_size] of companies + are fetched, among which only those with open jobs are + returned. """, # @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.ListCompaniesRequest) ), @@ -577,9 +577,7 @@ dict( DESCRIPTOR=_LISTCOMPANIESRESPONSE, __module__="google.cloud.talent_v4beta1.proto.company_service_pb2", - __doc__="""Output only. - - The List companies response object. + __doc__="""The List companies response object. Attributes: @@ -598,6 +596,12 @@ DESCRIPTOR._options = None +_CREATECOMPANYREQUEST.fields_by_name["parent"]._options = None +_CREATECOMPANYREQUEST.fields_by_name["company"]._options = None +_GETCOMPANYREQUEST.fields_by_name["name"]._options = None +_UPDATECOMPANYREQUEST.fields_by_name["company"]._options = None +_DELETECOMPANYREQUEST.fields_by_name["name"]._options = None +_LISTCOMPANIESREQUEST.fields_by_name["parent"]._options = None _COMPANYSERVICE = _descriptor.ServiceDescriptor( name="CompanyService", @@ -607,8 +611,8 @@ serialized_options=_b( "\312A\023jobs.googleapis.com\322AShttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/jobs" ), - serialized_start=879, - serialized_end=2062, + serialized_start=943, + serialized_end=2126, methods=[ _descriptor.MethodDescriptor( name="CreateCompany", diff --git a/talent/google/cloud/talent_v4beta1/proto/completion_service.proto b/talent/google/cloud/talent_v4beta1/proto/completion_service.proto index 2b105c24beaf..94ec01360ac7 100644 --- a/talent/google/cloud/talent_v4beta1/proto/completion_service.proto +++ 
b/talent/google/cloud/talent_v4beta1/proto/completion_service.proto @@ -19,6 +19,7 @@ package google.cloud.talent.v4beta1; import "google/api/annotations.proto"; import "google/api/client.proto"; +import "google/api/field_behavior.proto"; import "google/cloud/talent/v4beta1/common.proto"; option go_package = "google.golang.org/genproto/googleapis/cloud/talent/v4beta1;talent"; @@ -44,8 +45,6 @@ service Completion { } } -// Input only. -// // Auto-complete parameters. message CompleteQueryRequest { // Enum to specify the scope of completion. @@ -79,18 +78,18 @@ message CompleteQueryRequest { // Required. Resource name of tenant the completion is performed within. // // The format is "projects/{project_id}/tenants/{tenant_id}", for example, - // "projects/api-test-project/tenant/foo". + // "projects/foo/tenant/bar". // - // Tenant id is optional and the default tenant is used if unspecified, for - // example, "projects/api-test-project". - string parent = 1; + // If tenant id is unspecified, the default tenant is used, for + // example, "projects/foo". + string parent = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The query used to generate suggestions. // // The maximum number of allowed characters is 255. - string query = 2; + string query = 2 [(google.api.field_behavior) = REQUIRED]; - // Optional. The list of languages of the query. This is + // The list of languages of the query. This is // the BCP-47 language code, such as "en-US" or "sr-Latn". // For more information, see // [Tags for Identifying Languages](https://tools.ietf.org/html/bcp47). @@ -121,33 +120,29 @@ message CompleteQueryRequest { // Required. Completion result count. // // The maximum allowed page size is 10. - int32 page_size = 4; + int32 page_size = 4 [(google.api.field_behavior) = REQUIRED]; - // Optional. If provided, restricts completion to specified company. + // If provided, restricts completion to specified company. 
// // The format is // "projects/{project_id}/tenants/{tenant_id}/companies/{company_id}", for - // example, "projects/api-test-project/tenants/foo/companies/bar". + // example, "projects/foo/tenants/bar/companies/baz". // - // Tenant id is optional and the default tenant is used if unspecified, for - // example, "projects/api-test-project/companies/bar". + // If tenant id is unspecified, the default tenant is used, for + // example, "projects/foo". string company = 5; - // Optional. The scope of the completion. The defaults is + // The scope of the completion. The defaults is // [CompletionScope.PUBLIC][google.cloud.talent.v4beta1.CompleteQueryRequest.CompletionScope.PUBLIC]. CompletionScope scope = 6; - // Optional. The completion topic. The default is + // The completion topic. The default is // [CompletionType.COMBINED][google.cloud.talent.v4beta1.CompleteQueryRequest.CompletionType.COMBINED]. CompletionType type = 7; } -// Output only. -// // Response of auto-complete query. message CompleteQueryResponse { - // Output only. - // // Resource that represents completion results. message CompletionResult { // The suggestion for the query. 
diff --git a/talent/google/cloud/talent_v4beta1/proto/completion_service_pb2.py b/talent/google/cloud/talent_v4beta1/proto/completion_service_pb2.py index c62fde7c4329..b3fc611f5844 100644 --- a/talent/google/cloud/talent_v4beta1/proto/completion_service_pb2.py +++ b/talent/google/cloud/talent_v4beta1/proto/completion_service_pb2.py @@ -17,6 +17,7 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.cloud.talent_v4beta1.proto import ( common_pb2 as google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_common__pb2, ) @@ -30,11 +31,12 @@ "\n\037com.google.cloud.talent.v4beta1B\026CompletionServiceProtoP\001ZAgoogle.golang.org/genproto/googleapis/cloud/talent/v4beta1;talent\242\002\003CTS" ), serialized_pb=_b( - '\n:google/cloud/talent_v4beta1/proto/completion_service.proto\x12\x1bgoogle.cloud.talent.v4beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a.google/cloud/talent_v4beta1/proto/common.proto"\xc2\x03\n\x14\x43ompleteQueryRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\r\n\x05query\x18\x02 \x01(\t\x12\x16\n\x0elanguage_codes\x18\x03 \x03(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05\x12\x0f\n\x07\x63ompany\x18\x05 \x01(\t\x12P\n\x05scope\x18\x06 \x01(\x0e\x32\x41.google.cloud.talent.v4beta1.CompleteQueryRequest.CompletionScope\x12N\n\x04type\x18\x07 \x01(\x0e\x32@.google.cloud.talent.v4beta1.CompleteQueryRequest.CompletionType"K\n\x0f\x43ompletionScope\x12 \n\x1c\x43OMPLETION_SCOPE_UNSPECIFIED\x10\x00\x12\n\n\x06TENANT\x10\x01\x12\n\n\x06PUBLIC\x10\x02"`\n\x0e\x43ompletionType\x12\x1f\n\x1b\x43OMPLETION_TYPE_UNSPECIFIED\x10\x00\x12\r\n\tJOB_TITLE\x10\x01\x12\x10\n\x0c\x43OMPANY_NAME\x10\x02\x12\x0c\n\x08\x43OMBINED\x10\x03"\xc5\x02\n\x15\x43ompleteQueryResponse\x12_\n\x12\x63ompletion_results\x18\x01 
\x03(\x0b\x32\x43.google.cloud.talent.v4beta1.CompleteQueryResponse.CompletionResult\x12?\n\x08metadata\x18\x02 \x01(\x0b\x32-.google.cloud.talent.v4beta1.ResponseMetadata\x1a\x89\x01\n\x10\x43ompletionResult\x12\x12\n\nsuggestion\x18\x01 \x01(\t\x12N\n\x04type\x18\x02 \x01(\x0e\x32@.google.cloud.talent.v4beta1.CompleteQueryRequest.CompletionType\x12\x11\n\timage_uri\x18\x03 \x01(\t2\xd5\x02\n\nCompletion\x12\xd8\x01\n\rCompleteQuery\x12\x31.google.cloud.talent.v4beta1.CompleteQueryRequest\x1a\x32.google.cloud.talent.v4beta1.CompleteQueryResponse"`\x82\xd3\xe4\x93\x02Z\x12//v4beta1/{parent=projects/*/tenants/*}:completeZ\'\x12%/v4beta1/{parent=projects/*}:complete\x1al\xca\x41\x13jobs.googleapis.com\xd2\x41Shttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/jobsB\x84\x01\n\x1f\x63om.google.cloud.talent.v4beta1B\x16\x43ompletionServiceProtoP\x01ZAgoogle.golang.org/genproto/googleapis/cloud/talent/v4beta1;talent\xa2\x02\x03\x43TSb\x06proto3' + '\n:google/cloud/talent_v4beta1/proto/completion_service.proto\x12\x1bgoogle.cloud.talent.v4beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a.google/cloud/talent_v4beta1/proto/common.proto"\xd1\x03\n\x14\x43ompleteQueryRequest\x12\x13\n\x06parent\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x12\n\x05query\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x16\n\x0elanguage_codes\x18\x03 \x03(\t\x12\x16\n\tpage_size\x18\x04 \x01(\x05\x42\x03\xe0\x41\x02\x12\x0f\n\x07\x63ompany\x18\x05 \x01(\t\x12P\n\x05scope\x18\x06 \x01(\x0e\x32\x41.google.cloud.talent.v4beta1.CompleteQueryRequest.CompletionScope\x12N\n\x04type\x18\x07 \x01(\x0e\x32@.google.cloud.talent.v4beta1.CompleteQueryRequest.CompletionType"K\n\x0f\x43ompletionScope\x12 
\n\x1c\x43OMPLETION_SCOPE_UNSPECIFIED\x10\x00\x12\n\n\x06TENANT\x10\x01\x12\n\n\x06PUBLIC\x10\x02"`\n\x0e\x43ompletionType\x12\x1f\n\x1b\x43OMPLETION_TYPE_UNSPECIFIED\x10\x00\x12\r\n\tJOB_TITLE\x10\x01\x12\x10\n\x0c\x43OMPANY_NAME\x10\x02\x12\x0c\n\x08\x43OMBINED\x10\x03"\xc5\x02\n\x15\x43ompleteQueryResponse\x12_\n\x12\x63ompletion_results\x18\x01 \x03(\x0b\x32\x43.google.cloud.talent.v4beta1.CompleteQueryResponse.CompletionResult\x12?\n\x08metadata\x18\x02 \x01(\x0b\x32-.google.cloud.talent.v4beta1.ResponseMetadata\x1a\x89\x01\n\x10\x43ompletionResult\x12\x12\n\nsuggestion\x18\x01 \x01(\t\x12N\n\x04type\x18\x02 \x01(\x0e\x32@.google.cloud.talent.v4beta1.CompleteQueryRequest.CompletionType\x12\x11\n\timage_uri\x18\x03 \x01(\t2\xd5\x02\n\nCompletion\x12\xd8\x01\n\rCompleteQuery\x12\x31.google.cloud.talent.v4beta1.CompleteQueryRequest\x1a\x32.google.cloud.talent.v4beta1.CompleteQueryResponse"`\x82\xd3\xe4\x93\x02Z\x12//v4beta1/{parent=projects/*/tenants/*}:completeZ\'\x12%/v4beta1/{parent=projects/*}:complete\x1al\xca\x41\x13jobs.googleapis.com\xd2\x41Shttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/jobsB\x84\x01\n\x1f\x63om.google.cloud.talent.v4beta1B\x16\x43ompletionServiceProtoP\x01ZAgoogle.golang.org/genproto/googleapis/cloud/talent/v4beta1;talent\xa2\x02\x03\x43TSb\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_common__pb2.DESCRIPTOR, ], ) @@ -62,8 +64,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=472, - serialized_end=547, + serialized_start=520, + serialized_end=595, ) _sym_db.RegisterEnumDescriptor(_COMPLETEQUERYREQUEST_COMPLETIONSCOPE) @@ -92,8 +94,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=549, - serialized_end=645, + serialized_start=597, + serialized_end=693, ) 
_sym_db.RegisterEnumDescriptor(_COMPLETEQUERYREQUEST_COMPLETIONTYPE) @@ -120,7 +122,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -138,7 +140,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -174,7 +176,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -243,8 +245,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=195, - serialized_end=645, + serialized_start=228, + serialized_end=693, ) @@ -318,8 +320,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=836, - serialized_end=973, + serialized_start=884, + serialized_end=1021, ) _COMPLETEQUERYRESPONSE = _descriptor.Descriptor( @@ -374,8 +376,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=648, - serialized_end=973, + serialized_start=696, + serialized_end=1021, ) _COMPLETEQUERYREQUEST.fields_by_name[ @@ -408,9 +410,7 @@ dict( DESCRIPTOR=_COMPLETEQUERYREQUEST, __module__="google.cloud.talent_v4beta1.proto.completion_service_pb2", - __doc__="""Input only. - - Auto-complete parameters. + __doc__="""Auto-complete parameters. Attributes: @@ -418,15 +418,14 @@ Required. Resource name of tenant the completion is performed within. The format is "projects/{project\_id}/tenants/{tenant\_id}", for example, - "projects/api-test-project/tenant/foo". Tenant id is optional - and the default tenant is used if unspecified, for example, - "projects/api-test-project". + "projects/foo/tenant/bar". If tenant id is unspecified, the + default tenant is used, for example, "projects/foo". query: Required. The query used to generate suggestions. 
The maximum number of allowed characters is 255. language_codes: - Optional. The list of languages of the query. This is the - BCP-47 language code, such as "en-US" or "sr-Latn". For more + The list of languages of the query. This is the BCP-47 + language code, such as "en-US" or "sr-Latn". For more information, see `Tags for Identifying Languages `__. For [CompletionType.J OB\_TITLE][google.cloud.talent.v4beta1.CompleteQueryRequest.Co @@ -448,20 +447,20 @@ Required. Completion result count. The maximum allowed page size is 10. company: - Optional. If provided, restricts completion to specified - company. The format is "projects/{project\_id}/tenants/{tenan - t\_id}/companies/{company\_id}", for example, "projects/api- - test-project/tenants/foo/companies/bar". Tenant id is - optional and the default tenant is used if unspecified, for - example, "projects/api-test-project/companies/bar". + If provided, restricts completion to specified company. The + format is "projects/{project\_id}/tenants/{tenant\_id}/compani + es/{company\_id}", for example, + "projects/foo/tenants/bar/companies/baz". If tenant id is + unspecified, the default tenant is used, for example, + "projects/foo". scope: - Optional. The scope of the completion. The defaults is [Comple - tionScope.PUBLIC][google.cloud.talent.v4beta1.CompleteQueryReq - uest.CompletionScope.PUBLIC]. + The scope of the completion. The defaults is [CompletionScope. + PUBLIC][google.cloud.talent.v4beta1.CompleteQueryRequest.Compl + etionScope.PUBLIC]. type: - Optional. The completion topic. The default is [CompletionType - .COMBINED][google.cloud.talent.v4beta1.CompleteQueryRequest.Co - mpletionType.COMBINED]. + The completion topic. The default is [CompletionType.COMBINED] + [google.cloud.talent.v4beta1.CompleteQueryRequest.CompletionTy + pe.COMBINED]. 
""", # @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.CompleteQueryRequest) ), @@ -478,9 +477,7 @@ dict( DESCRIPTOR=_COMPLETEQUERYRESPONSE_COMPLETIONRESULT, __module__="google.cloud.talent_v4beta1.proto.completion_service_pb2", - __doc__="""Output only. - - Resource that represents completion results. + __doc__="""Resource that represents completion results. Attributes: @@ -498,9 +495,7 @@ ), DESCRIPTOR=_COMPLETEQUERYRESPONSE, __module__="google.cloud.talent_v4beta1.proto.completion_service_pb2", - __doc__="""Output only. - - Response of auto-complete query. + __doc__="""Response of auto-complete query. Attributes: @@ -518,6 +513,9 @@ DESCRIPTOR._options = None +_COMPLETEQUERYREQUEST.fields_by_name["parent"]._options = None +_COMPLETEQUERYREQUEST.fields_by_name["query"]._options = None +_COMPLETEQUERYREQUEST.fields_by_name["page_size"]._options = None _COMPLETION = _descriptor.ServiceDescriptor( name="Completion", @@ -527,8 +525,8 @@ serialized_options=_b( "\312A\023jobs.googleapis.com\322AShttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/jobs" ), - serialized_start=976, - serialized_end=1317, + serialized_start=1024, + serialized_end=1365, methods=[ _descriptor.MethodDescriptor( name="CompleteQuery", diff --git a/talent/google/cloud/talent_v4beta1/proto/event.proto b/talent/google/cloud/talent_v4beta1/proto/event.proto index d017a4ffa36e..41eaeefbc217 100644 --- a/talent/google/cloud/talent_v4beta1/proto/event.proto +++ b/talent/google/cloud/talent_v4beta1/proto/event.proto @@ -18,6 +18,7 @@ syntax = "proto3"; package google.cloud.talent.v4beta1; import "google/api/annotations.proto"; +import "google/api/field_behavior.proto"; import "google/protobuf/timestamp.proto"; option go_package = "google.golang.org/genproto/googleapis/cloud/talent/v4beta1;talent"; @@ -32,17 +33,18 @@ option objc_class_prefix = "CTS"; // service to perform optimally. 
The number of events sent must be consistent // with other calls, such as job searches, issued to the service by the client. message ClientEvent { - // Optional but highly recommended. + // Strongly recommended for the best service experience. // // A unique ID generated in the API responses. It can be found in // [ResponseMetadata.request_id][google.cloud.talent.v4beta1.ResponseMetadata.request_id]. string request_id = 1; // Required. A unique identifier, generated by the client application. - string event_id = 2; + string event_id = 2 [(google.api.field_behavior) = REQUIRED]; // Required. The timestamp of the event. - google.protobuf.Timestamp create_time = 4; + google.protobuf.Timestamp create_time = 4 + [(google.api.field_behavior) = REQUIRED]; // Required. // @@ -57,8 +59,8 @@ message ClientEvent { ProfileEvent profile_event = 6; } - // Optional. Notes about the event provided by recruiters or other users, for - // example, feedback on why a profile was bookmarked. + // Notes about the event provided by recruiters or other users, for example, + // feedback on why a profile was bookmarked. string event_notes = 9; } @@ -167,7 +169,7 @@ message JobEvent { // Required. The type of the event (see // [JobEventType][google.cloud.talent.v4beta1.JobEvent.JobEventType]). - JobEventType type = 1; + JobEventType type = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The [job name(s)][google.cloud.talent.v4beta1.Job.name] // associated with this event. For example, if this is an @@ -179,15 +181,15 @@ message JobEvent { // // The format is // "projects/{project_id}/tenants/{tenant_id}/jobs/{job_id}", for - // example, "projects/api-test-project/tenants/foo/jobs/1234". - repeated string jobs = 2; + // example, "projects/foo/tenants/bar/jobs/baz". + repeated string jobs = 2 [(google.api.field_behavior) = REQUIRED]; - // Optional. The [profile name][google.cloud.talent.v4beta1.Profile.name] - // associated with this client event. 
+ // The [profile name][google.cloud.talent.v4beta1.Profile.name] associated + // with this client event. // // The format is // "projects/{project_id}/tenants/{tenant_id}/profiles/{profile_id}", - // for example, "projects/api-test-project/tenants/foo/profiles/bar". + // for example, "projects/foo/tenants/bar/profiles/baz". string profile = 3; } @@ -228,22 +230,21 @@ message ProfileEvent { } // Required. Type of event. - ProfileEventType type = 1; + ProfileEventType type = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The [profile name(s)][google.cloud.talent.v4beta1.Profile.name] // associated with this client event. // // The format is // "projects/{project_id}/tenants/{tenant_id}/profiles/{profile_id}", - // for example, "projects/api-test-project/tenants/foo/profiles/bar". - repeated string profiles = 2; + // for example, "projects/foo/tenants/bar/profiles/baz". + repeated string profiles = 2 [(google.api.field_behavior) = REQUIRED]; - // Optional. The [job name(s)][google.cloud.talent.v4beta1.Job.name] - // associated with this client event. Leave it empty if the event isn't - // associated with a job. + // The [job name(s)][google.cloud.talent.v4beta1.Job.name] associated with + // this client event. Leave it empty if the event isn't associated with a job. // // The format is // "projects/{project_id}/tenants/{tenant_id}/jobs/{job_id}", for - // example, "projects/api-test-project/tenants/foo/jobs/1234". + // example, "projects/foo/tenants/bar/jobs/baz". 
repeated string jobs = 6; } diff --git a/talent/google/cloud/talent_v4beta1/proto/event_pb2.py b/talent/google/cloud/talent_v4beta1/proto/event_pb2.py index d505d739c101..394fdf6d431a 100644 --- a/talent/google/cloud/talent_v4beta1/proto/event_pb2.py +++ b/talent/google/cloud/talent_v4beta1/proto/event_pb2.py @@ -16,6 +16,7 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 @@ -27,10 +28,11 @@ "\n\037com.google.cloud.talent.v4beta1B\nEventProtoP\001ZAgoogle.golang.org/genproto/googleapis/cloud/talent/v4beta1;talent\242\002\003CTS" ), serialized_pb=_b( - '\n-google/cloud/talent_v4beta1/proto/event.proto\x12\x1bgoogle.cloud.talent.v4beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\x82\x02\n\x0b\x43lientEvent\x12\x12\n\nrequest_id\x18\x01 \x01(\t\x12\x10\n\x08\x65vent_id\x18\x02 \x01(\t\x12/\n\x0b\x63reate_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12:\n\tjob_event\x18\x05 \x01(\x0b\x32%.google.cloud.talent.v4beta1.JobEventH\x00\x12\x42\n\rprofile_event\x18\x06 \x01(\x0b\x32).google.cloud.talent.v4beta1.ProfileEventH\x00\x12\x13\n\x0b\x65vent_notes\x18\t \x01(\tB\x07\n\x05\x65vent"\xec\x03\n\x08JobEvent\x12@\n\x04type\x18\x01 \x01(\x0e\x32\x32.google.cloud.talent.v4beta1.JobEvent.JobEventType\x12\x0c\n\x04jobs\x18\x02 \x03(\t\x12\x0f\n\x07profile\x18\x03 \x01(\t"\xfe\x02\n\x0cJobEventType\x12\x1e\n\x1aJOB_EVENT_TYPE_UNSPECIFIED\x10\x00\x12\x0e\n\nIMPRESSION\x10\x01\x12\x08\n\x04VIEW\x10\x02\x12\x11\n\rVIEW_REDIRECT\x10\x03\x12\x15\n\x11\x41PPLICATION_START\x10\x04\x12\x16\n\x12\x41PPLICATION_FINISH\x10\x05\x12 \n\x1c\x41PPLICATION_QUICK_SUBMISSION\x10\x06\x12\x18\n\x14\x41PPLICATION_REDIRECT\x10\x07\x12!\n\x1d\x41PPLICATION_START_FROM_SEARCH\x10\x08\x12$\n 
APPLICATION_REDIRECT_FROM_SEARCH\x10\t\x12\x1e\n\x1a\x41PPLICATION_COMPANY_SUBMIT\x10\n\x12\x0c\n\x08\x42OOKMARK\x10\x0b\x12\x10\n\x0cNOTIFICATION\x10\x0c\x12\t\n\x05HIRED\x10\r\x12\x0b\n\x07SENT_CV\x10\x0e\x12\x15\n\x11INTERVIEW_GRANTED\x10\x0f"\xd8\x01\n\x0cProfileEvent\x12H\n\x04type\x18\x01 \x01(\x0e\x32:.google.cloud.talent.v4beta1.ProfileEvent.ProfileEventType\x12\x10\n\x08profiles\x18\x02 \x03(\t\x12\x0c\n\x04jobs\x18\x06 \x03(\t"^\n\x10ProfileEventType\x12"\n\x1ePROFILE_EVENT_TYPE_UNSPECIFIED\x10\x00\x12\x0e\n\nIMPRESSION\x10\x01\x12\x08\n\x04VIEW\x10\x02\x12\x0c\n\x08\x42OOKMARK\x10\x03\x42x\n\x1f\x63om.google.cloud.talent.v4beta1B\nEventProtoP\x01ZAgoogle.golang.org/genproto/googleapis/cloud/talent/v4beta1;talent\xa2\x02\x03\x43TSb\x06proto3' + '\n-google/cloud/talent_v4beta1/proto/event.proto\x12\x1bgoogle.cloud.talent.v4beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\x8c\x02\n\x0b\x43lientEvent\x12\x12\n\nrequest_id\x18\x01 \x01(\t\x12\x15\n\x08\x65vent_id\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x34\n\x0b\x63reate_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x02\x12:\n\tjob_event\x18\x05 \x01(\x0b\x32%.google.cloud.talent.v4beta1.JobEventH\x00\x12\x42\n\rprofile_event\x18\x06 \x01(\x0b\x32).google.cloud.talent.v4beta1.ProfileEventH\x00\x12\x13\n\x0b\x65vent_notes\x18\t \x01(\tB\x07\n\x05\x65vent"\xf6\x03\n\x08JobEvent\x12\x45\n\x04type\x18\x01 \x01(\x0e\x32\x32.google.cloud.talent.v4beta1.JobEvent.JobEventTypeB\x03\xe0\x41\x02\x12\x11\n\x04jobs\x18\x02 \x03(\tB\x03\xe0\x41\x02\x12\x0f\n\x07profile\x18\x03 \x01(\t"\xfe\x02\n\x0cJobEventType\x12\x1e\n\x1aJOB_EVENT_TYPE_UNSPECIFIED\x10\x00\x12\x0e\n\nIMPRESSION\x10\x01\x12\x08\n\x04VIEW\x10\x02\x12\x11\n\rVIEW_REDIRECT\x10\x03\x12\x15\n\x11\x41PPLICATION_START\x10\x04\x12\x16\n\x12\x41PPLICATION_FINISH\x10\x05\x12 
\n\x1c\x41PPLICATION_QUICK_SUBMISSION\x10\x06\x12\x18\n\x14\x41PPLICATION_REDIRECT\x10\x07\x12!\n\x1d\x41PPLICATION_START_FROM_SEARCH\x10\x08\x12$\n APPLICATION_REDIRECT_FROM_SEARCH\x10\t\x12\x1e\n\x1a\x41PPLICATION_COMPANY_SUBMIT\x10\n\x12\x0c\n\x08\x42OOKMARK\x10\x0b\x12\x10\n\x0cNOTIFICATION\x10\x0c\x12\t\n\x05HIRED\x10\r\x12\x0b\n\x07SENT_CV\x10\x0e\x12\x15\n\x11INTERVIEW_GRANTED\x10\x0f"\xe2\x01\n\x0cProfileEvent\x12M\n\x04type\x18\x01 \x01(\x0e\x32:.google.cloud.talent.v4beta1.ProfileEvent.ProfileEventTypeB\x03\xe0\x41\x02\x12\x15\n\x08profiles\x18\x02 \x03(\tB\x03\xe0\x41\x02\x12\x0c\n\x04jobs\x18\x06 \x03(\t"^\n\x10ProfileEventType\x12"\n\x1ePROFILE_EVENT_TYPE_UNSPECIFIED\x10\x00\x12\x0e\n\nIMPRESSION\x10\x01\x12\x08\n\x04VIEW\x10\x02\x12\x0c\n\x08\x42OOKMARK\x10\x03\x42x\n\x1f\x63om.google.cloud.talent.v4beta1B\nEventProtoP\x01ZAgoogle.golang.org/genproto/googleapis/cloud/talent/v4beta1;talent\xa2\x02\x03\x43TSb\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, ], ) @@ -129,8 +131,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=513, - serialized_end=895, + serialized_start=566, + serialized_end=948, ) _sym_db.RegisterEnumDescriptor(_JOBEVENT_JOBEVENTTYPE) @@ -159,8 +161,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=1020, - serialized_end=1114, + serialized_start=1083, + serialized_end=1177, ) _sym_db.RegisterEnumDescriptor(_PROFILEEVENT_PROFILEEVENTTYPE) @@ -205,7 +207,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -223,7 +225,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -297,8 +299,8 @@ fields=[], 
) ], - serialized_start=142, - serialized_end=400, + serialized_start=175, + serialized_end=443, ) @@ -324,7 +326,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -342,7 +344,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -372,8 +374,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=403, - serialized_end=895, + serialized_start=446, + serialized_end=948, ) @@ -399,7 +401,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -417,7 +419,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -447,8 +449,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=898, - serialized_end=1114, + serialized_start=951, + serialized_end=1177, ) _CLIENTEVENT.fields_by_name[ @@ -492,9 +494,10 @@ Attributes: request_id: - Optional but highly recommended. A unique ID generated in the - API responses. It can be found in [ResponseMetadata.request\_i - d][google.cloud.talent.v4beta1.ResponseMetadata.request\_id]. + Strongly recommended for the best service experience. A + unique ID generated in the API responses. It can be found in [ + ResponseMetadata.request\_id][google.cloud.talent.v4beta1.Resp + onseMetadata.request\_id]. event_id: Required. A unique identifier, generated by the client application. @@ -509,9 +512,8 @@ An event issued when a profile searcher interacts with the application that implements Cloud Talent Solution. event_notes: - Optional. 
Notes about the event provided by recruiters or - other users, for example, feedback on why a profile was - bookmarked. + Notes about the event provided by recruiters or other users, + for example, feedback on why a profile was bookmarked. """, # @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.ClientEvent) ), @@ -543,14 +545,12 @@ event, this field contains the identifier of the viewed job. The format is "projects/{project\_id}/tenants/{tenant\_id}/jobs/{job\_id}", - for example, "projects/api-test- - project/tenants/foo/jobs/1234". + for example, "projects/foo/tenants/bar/jobs/baz". profile: - Optional. The [profile - name][google.cloud.talent.v4beta1.Profile.name] associated - with this client event. The format is "projects/{project\_id} - /tenants/{tenant\_id}/profiles/{profile\_id}", for example, - "projects/api-test-project/tenants/foo/profiles/bar". + The [profile name][google.cloud.talent.v4beta1.Profile.name] + associated with this client event. The format is "projects/{p + roject\_id}/tenants/{tenant\_id}/profiles/{profile\_id}", for + example, "projects/foo/tenants/bar/profiles/baz". """, # @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.JobEvent) ), @@ -575,15 +575,13 @@ name(s)][google.cloud.talent.v4beta1.Profile.name] associated with this client event. The format is "projects/{project\_id} /tenants/{tenant\_id}/profiles/{profile\_id}", for example, - "projects/api-test-project/tenants/foo/profiles/bar". + "projects/foo/tenants/bar/profiles/baz". jobs: - Optional. The [job - name(s)][google.cloud.talent.v4beta1.Job.name] associated with - this client event. Leave it empty if the event isn't - associated with a job. The format is + The [job name(s)][google.cloud.talent.v4beta1.Job.name] + associated with this client event. Leave it empty if the event + isn't associated with a job. 
The format is "projects/{project\_id}/tenants/{tenant\_id}/jobs/{job\_id}", - for example, "projects/api-test- - project/tenants/foo/jobs/1234". + for example, "projects/foo/tenants/bar/jobs/baz". """, # @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.ProfileEvent) ), @@ -592,4 +590,10 @@ DESCRIPTOR._options = None +_CLIENTEVENT.fields_by_name["event_id"]._options = None +_CLIENTEVENT.fields_by_name["create_time"]._options = None +_JOBEVENT.fields_by_name["type"]._options = None +_JOBEVENT.fields_by_name["jobs"]._options = None +_PROFILEEVENT.fields_by_name["type"]._options = None +_PROFILEEVENT.fields_by_name["profiles"]._options = None # @@protoc_insertion_point(module_scope) diff --git a/talent/google/cloud/talent_v4beta1/proto/event_service.proto b/talent/google/cloud/talent_v4beta1/proto/event_service.proto index 966632e57d2e..6a7d94008c19 100644 --- a/talent/google/cloud/talent_v4beta1/proto/event_service.proto +++ b/talent/google/cloud/talent_v4beta1/proto/event_service.proto @@ -19,6 +19,7 @@ package google.cloud.talent.v4beta1; import "google/api/annotations.proto"; import "google/api/client.proto"; +import "google/api/field_behavior.proto"; import "google/cloud/talent/v4beta1/event.proto"; option go_package = "google.golang.org/genproto/googleapis/cloud/talent/v4beta1;talent"; @@ -58,13 +59,11 @@ message CreateClientEventRequest { // Required. Resource name of the tenant under which the event is created. // // The format is "projects/{project_id}/tenants/{tenant_id}", for example, - // "projects/api-test-project/tenant/foo". - // - // Tenant id is optional and a default tenant is created if unspecified, for - // example, "projects/api-test-project". - string parent = 1; + // "projects/foo/tenant/bar". If tenant id is unspecified, a default tenant + // is created, for example, "projects/foo". + string parent = 1 [(google.api.field_behavior) = REQUIRED]; // Required. 
Events issued when end user interacts with customer's application // that uses Cloud Talent Solution. - ClientEvent client_event = 2; + ClientEvent client_event = 2 [(google.api.field_behavior) = REQUIRED]; } diff --git a/talent/google/cloud/talent_v4beta1/proto/event_service_pb2.py b/talent/google/cloud/talent_v4beta1/proto/event_service_pb2.py index 7cf0778adcc7..d78561209e90 100644 --- a/talent/google/cloud/talent_v4beta1/proto/event_service_pb2.py +++ b/talent/google/cloud/talent_v4beta1/proto/event_service_pb2.py @@ -17,6 +17,7 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.cloud.talent_v4beta1.proto import ( event_pb2 as google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_event__pb2, ) @@ -30,11 +31,12 @@ "\n\037com.google.cloud.talent.v4beta1B\021EventServiceProtoP\001ZAgoogle.golang.org/genproto/googleapis/cloud/talent/v4beta1;talent\242\002\003CTS" ), serialized_pb=_b( - '\n5google/cloud/talent_v4beta1/proto/event_service.proto\x12\x1bgoogle.cloud.talent.v4beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a-google/cloud/talent_v4beta1/proto/event.proto"j\n\x18\x43reateClientEventRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12>\n\x0c\x63lient_event\x18\x02 
\x01(\x0b\x32(.google.cloud.talent.v4beta1.ClientEvent2\xe3\x02\n\x0c\x45ventService\x12\xe4\x01\n\x11\x43reateClientEvent\x12\x35.google.cloud.talent.v4beta1.CreateClientEventRequest\x1a(.google.cloud.talent.v4beta1.ClientEvent"n\x82\xd3\xe4\x93\x02h"3/v4beta1/{parent=projects/*/tenants/*}/clientEvents:\x01*Z.")/v4beta1/{parent=projects/*}/clientEvents:\x01*\x1al\xca\x41\x13jobs.googleapis.com\xd2\x41Shttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/jobsB\x7f\n\x1f\x63om.google.cloud.talent.v4beta1B\x11\x45ventServiceProtoP\x01ZAgoogle.golang.org/genproto/googleapis/cloud/talent/v4beta1;talent\xa2\x02\x03\x43TSb\x06proto3' + '\n5google/cloud/talent_v4beta1/proto/event_service.proto\x12\x1bgoogle.cloud.talent.v4beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a-google/cloud/talent_v4beta1/proto/event.proto"t\n\x18\x43reateClientEventRequest\x12\x13\n\x06parent\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x43\n\x0c\x63lient_event\x18\x02 \x01(\x0b\x32(.google.cloud.talent.v4beta1.ClientEventB\x03\xe0\x41\x02\x32\xe3\x02\n\x0c\x45ventService\x12\xe4\x01\n\x11\x43reateClientEvent\x12\x35.google.cloud.talent.v4beta1.CreateClientEventRequest\x1a(.google.cloud.talent.v4beta1.ClientEvent"n\x82\xd3\xe4\x93\x02h"3/v4beta1/{parent=projects/*/tenants/*}/clientEvents:\x01*Z.")/v4beta1/{parent=projects/*}/clientEvents:\x01*\x1al\xca\x41\x13jobs.googleapis.com\xd2\x41Shttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/jobsB\x7f\n\x1f\x63om.google.cloud.talent.v4beta1B\x11\x45ventServiceProtoP\x01ZAgoogle.golang.org/genproto/googleapis/cloud/talent/v4beta1;talent\xa2\x02\x03\x43TSb\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_event__pb2.DESCRIPTOR, ], ) @@ -62,7 +64,7 @@ 
containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -80,7 +82,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -92,8 +94,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=188, - serialized_end=294, + serialized_start=221, + serialized_end=337, ) _CREATECLIENTEVENTREQUEST.fields_by_name[ @@ -118,9 +120,8 @@ Required. Resource name of the tenant under which the event is created. The format is "projects/{project\_id}/tenants/{tenant\_id}", for example, - "projects/api-test-project/tenant/foo". Tenant id is optional - and a default tenant is created if unspecified, for example, - "projects/api-test-project". + "projects/foo/tenant/bar". If tenant id is unspecified, a + default tenant is created, for example, "projects/foo". client_event: Required. Events issued when end user interacts with customer's application that uses Cloud Talent Solution. 
@@ -132,6 +133,8 @@ DESCRIPTOR._options = None +_CREATECLIENTEVENTREQUEST.fields_by_name["parent"]._options = None +_CREATECLIENTEVENTREQUEST.fields_by_name["client_event"]._options = None _EVENTSERVICE = _descriptor.ServiceDescriptor( name="EventService", @@ -141,8 +144,8 @@ serialized_options=_b( "\312A\023jobs.googleapis.com\322AShttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/jobs" ), - serialized_start=297, - serialized_end=652, + serialized_start=340, + serialized_end=695, methods=[ _descriptor.MethodDescriptor( name="CreateClientEvent", diff --git a/talent/google/cloud/talent_v4beta1/proto/filters.proto b/talent/google/cloud/talent_v4beta1/proto/filters.proto index 35f5d8279457..caf0a28f2b16 100644 --- a/talent/google/cloud/talent_v4beta1/proto/filters.proto +++ b/talent/google/cloud/talent_v4beta1/proto/filters.proto @@ -18,8 +18,10 @@ syntax = "proto3"; package google.cloud.talent.v4beta1; import "google/api/annotations.proto"; +import "google/api/field_behavior.proto"; import "google/cloud/talent/v4beta1/common.proto"; import "google/protobuf/duration.proto"; +import "google/protobuf/field_mask.proto"; import "google/protobuf/timestamp.proto"; import "google/protobuf/wrappers.proto"; import "google/type/date.proto"; @@ -32,17 +34,26 @@ option java_outer_classname = "FiltersProto"; option java_package = "com.google.cloud.talent.v4beta1"; option objc_class_prefix = "CTS"; -// Input only. -// // The query required to perform a search query. message JobQuery { - // Optional. The query string that matches against the job title, description, - // and location fields. + // The query string that matches against the job title, description, and + // location fields. // // The maximum number of allowed characters is 255. string query = 1; - // Optional. This filter specifies the company entities to search against. + // The language code of [query][google.cloud.talent.v4beta1.JobQuery.query]. + // For example, "en-US". 
This field helps to better interpret the query. + // + // If a value isn't specified, the query language code is automatically + // detected, which may not be accurate. + // + // Language code should be in BCP-47 format, such as "en-US" or "sr-Latn". + // For more information, see + // [Tags for Identifying Languages](https://tools.ietf.org/html/bcp47). + string query_language_code = 14; + + // This filter specifies the company entities to search against. // // If a value isn't specified, jobs are searched for against all // companies. @@ -51,16 +62,16 @@ message JobQuery { // companies specified. // // The format is - // "projects/{project_id}/tenants/{tenant_id}/companies/{company_id}", for - // example, "projects/api-test-project/tenants/foo/companies/bar". + // "projects/{project_id}/tenants/{tenant_id}/companies/{company_id}". For + // example, "projects/foo/tenants/bar/companies/baz". // - // Tenant id is optional and the default tenant is used if unspecified, for - // example, "projects/api-test-project/companies/bar". + // If tenant id is unspecified, the default tenant is used. For + // example, "projects/foo/companies/bar". // // At most 20 company filters are allowed. repeated string companies = 2; - // Optional. The location filter specifies geo-regions containing the jobs to + // The location filter specifies geo-regions containing the jobs to // search against. See // [LocationFilter][google.cloud.talent.v4beta1.LocationFilter] for more // information. @@ -76,9 +87,9 @@ message JobQuery { // At most 5 location filters are allowed. repeated LocationFilter location_filters = 3; - // Optional. The category filter specifies the categories of jobs to search - // against. See [JobCategory][google.cloud.talent.v4beta1.JobCategory] for - // more information. + // The category filter specifies the categories of jobs to search against. + // See [JobCategory][google.cloud.talent.v4beta1.JobCategory] for more + // information. 
// // If a value isn't specified, jobs from any category are searched against. // @@ -86,8 +97,7 @@ message JobQuery { // categories are searched against. repeated JobCategory job_categories = 4; - // Optional. Allows filtering jobs by commute time with different travel - // methods (for + // Allows filtering jobs by commute time with different travel methods (for // example, driving or public transit). // // Note: This only works when you specify a @@ -98,7 +108,7 @@ message JobQuery { // Currently we don't support sorting by commute time. CommuteFilter commute_filter = 5; - // Optional. This filter specifies the exact company + // This filter specifies the exact company // [Company.display_name][google.cloud.talent.v4beta1.Company.display_name] of // the jobs to search against. // @@ -111,14 +121,14 @@ message JobQuery { // At most 20 company display name filters are allowed. repeated string company_display_names = 6; - // Optional. This search filter is applied only to + // This search filter is applied only to // [Job.compensation_info][google.cloud.talent.v4beta1.Job.compensation_info]. // For example, if the filter is specified as "Hourly job with per-hour // compensation > $15", only jobs meeting these criteria are searched. If a // filter isn't defined, all open jobs are searched. CompensationFilter compensation_filter = 7; - // Optional. This filter specifies a structured syntax to match against the + // This filter specifies a structured syntax to match against the // [Job.custom_attributes][google.cloud.talent.v4beta1.Job.custom_attributes] // marked as `filterable`. // @@ -143,15 +153,15 @@ message JobQuery { // driving_years > 10` string custom_attribute_filter = 8; - // Optional. This flag controls the spell-check feature. If false, the + // This flag controls the spell-check feature. If false, the // service attempts to correct a misspelled query, // for example, "enginee" is corrected to "engineer". 
// // Defaults to false: a spell check is performed. bool disable_spell_check = 9; - // Optional. The employment type filter specifies the employment type of jobs - // to search against, such as + // The employment type filter specifies the employment type of jobs to + // search against, such as // [EmploymentType.FULL_TIME][google.cloud.talent.v4beta1.EmploymentType.FULL_TIME]. // // If a value isn't specified, jobs in the search results includes any @@ -161,7 +171,7 @@ message JobQuery { // any of the specified employment types. repeated EmploymentType employment_types = 10; - // Optional. This filter specifies the locale of jobs to search against, + // This filter specifies the locale of jobs to search against, // for example, "en-US". // // If a value isn't specified, the search results can contain jobs in any @@ -175,12 +185,11 @@ message JobQuery { // At most 10 language code filters are allowed. repeated string language_codes = 11; - // Optional. Jobs published within a range specified by this filter are - // searched against. + // Jobs published within a range specified by this filter are searched + // against. TimestampRange publish_time_range = 12; - // Optional. This filter specifies a list of job names to be excluded during - // search. + // This filter specifies a list of job names to be excluded during search. // // At most 400 excluded job names are allowed. repeated string excluded_jobs = 13; @@ -188,13 +197,13 @@ message JobQuery { // Filters to apply when performing the search query. message ProfileQuery { - // Optional. Keywords to match any text fields of profiles. + // Keywords to match any text fields of profiles. // // For example, "software engineer in Palo Alto". string query = 1; - // Optional. The location filter specifies geo-regions containing the profiles - // to search against. + // The location filter specifies geo-regions containing the profiles to + // search against. 
// // One of // [LocationFilter.address][google.cloud.talent.v4beta1.LocationFilter.address] @@ -208,15 +217,18 @@ message ProfileQuery { // // The following logic is used to determine which locations in // the profile to filter against: + // // 1. All of the profile's geocoded // [Profile.addresses][google.cloud.talent.v4beta1.Profile.addresses] where // [Address.usage][google.cloud.talent.v4beta1.Address.usage] is PERSONAL and // [Address.current][google.cloud.talent.v4beta1.Address.current] is true. + // // 2. If the above set of locations is empty, all of the profile's geocoded // [Profile.addresses][google.cloud.talent.v4beta1.Profile.addresses] where // [Address.usage][google.cloud.talent.v4beta1.Address.usage] is // CONTACT_INFO_USAGE_UNSPECIFIED and // [Address.current][google.cloud.talent.v4beta1.Address.current] is true. + // // 3. If the above set of locations is empty, all of the profile's geocoded // [Profile.addresses][google.cloud.talent.v4beta1.Profile.addresses] where // [Address.usage][google.cloud.talent.v4beta1.Address.usage] is PERSONAL or @@ -294,7 +306,7 @@ message ProfileQuery { // is negative, an error is thrown. repeated LocationFilter location_filters = 2; - // Optional. Job title filter specifies job titles of profiles to match on. + // Job title filter specifies job titles of profiles to match on. // // If a job title isn't specified, profiles with any titles are retrieved. // @@ -308,7 +320,7 @@ message ProfileQuery { // For example, search for profiles with a job title "Product Manager". repeated JobTitleFilter job_title_filters = 3; - // Optional. Employer filter specifies employers of profiles to match on. + // Employer filter specifies employers of profiles to match on. // // If an employer filter isn't specified, profiles with any employers are // retrieved. @@ -324,7 +336,7 @@ message ProfileQuery { // LLC". repeated EmployerFilter employer_filters = 4; - // Optional. Education filter specifies education of profiles to match on. 
+ // Education filter specifies education of profiles to match on. // // If an education filter isn't specified, profiles with any education are // retrieved. @@ -339,7 +351,7 @@ message ProfileQuery { // For example, search for profiles with a master degree. repeated EducationFilter education_filters = 5; - // Optional. Skill filter specifies skill of profiles to match on. + // Skill filter specifies skill of profiles to match on. // // If a skill filter isn't specified, profiles with any skills are retrieved. // @@ -353,8 +365,8 @@ message ProfileQuery { // list. repeated SkillFilter skill_filters = 6; - // Optional. Work experience filter specifies the total working experience of - // profiles to match on. + // Work experience filter specifies the total working experience of profiles + // to match on. // // If a work experience filter isn't specified, profiles with any // professional experience are retrieved. @@ -365,29 +377,26 @@ message ProfileQuery { // For example, search for profiles with 10 years of work experience. repeated WorkExperienceFilter work_experience_filter = 7; - // Optional. Time filter specifies the create/update timestamp of the profiles - // to match on. + // Time filter specifies the create/update timestamp of the profiles to match + // on. // // For example, search for profiles created since "2018-1-1". repeated TimeFilter time_filters = 8; - // Optional. The hirable filter specifies the profile's hirable status to - // match on. + // The hirable filter specifies the profile's hirable status to match on. google.protobuf.BoolValue hirable_filter = 9; - // Optional. The application date filters specify application date ranges to - // match on. + // The application date filters specify application date ranges to match on. repeated ApplicationDateFilter application_date_filters = 10; - // Optional. The application outcome notes filters specify the notes for the - // outcome of the job application. 
+ // The application outcome notes filters specify the notes for the outcome of + // the job application. repeated ApplicationOutcomeNotesFilter application_outcome_notes_filters = 11; - // Optional. The application job filters specify the job applied for in the - // application. + // The application job filters specify the job applied for in the application. repeated ApplicationJobFilter application_job_filters = 13; - // Optional. This filter specifies a structured syntax to match against the + // This filter specifies a structured syntax to match against the // [Profile.custom_attributes][google.cloud.talent.v4beta1.Profile.custom_attributes] // that are marked as `filterable`. // @@ -414,8 +423,10 @@ message ProfileQuery { // (key1 = "TEST" OR LOWER(key1)="test" OR NOT EMPTY(key1)) string custom_attribute_filter = 15; - // Optional. The candidate availability filter which filters based on - // availability signals. + // Deprecated. Use availability_filters instead. + // + // The candidate availability filter which filters based on availability + // signals. // // Signal 1: Number of days since most recent job application. See // [Availability.JobApplicationAvailabilitySignal][google.cloud.talent.v4beta1.Availability.JobApplicationAvailabilitySignal] @@ -430,9 +441,28 @@ message ProfileQuery { // based on an aggregated set of signals. Specifically, the intent is NOT to // indicate the candidate's potential qualification / interest / close ability // for a specific job. - CandidateAvailabilityFilter candidate_availability_filter = 16; + CandidateAvailabilityFilter candidate_availability_filter = 16 + [deprecated = true]; + + // The availability filter which filters based on + // [Profile.availability_signals][google.cloud.talent.v4beta1.Profile.availability_signals]. + // + // The availability filter helps a recruiter understand if a + // specific candidate is likely to be actively seeking new job opportunities + // based on an aggregated set of signals. 
Specifically, the intent is NOT to + // indicate the candidate's potential qualification / interest / close ability + // for a specific job. + // + // There can be at most one + // [AvailabilityFilter][google.cloud.talent.v4beta1.AvailabilityFilter] per + // [signal_type][google.cloud.talent.v4beta1.AvailabilityFilter.signal_type]. + // If there are multiple + // [AvailabilityFilter][google.cloud.talent.v4beta1.AvailabilityFilter] for a + // [signal_type][google.cloud.talent.v4beta1.AvailabilityFilter.signal_type], + // an error is thrown. + repeated AvailabilityFilter availability_filters = 18; - // Optional. Person name filter specifies person name of profiles to match on. + // Person name filter specifies person name of profiles to match on. // // If multiple person name filters are specified, profiles that match any // person name filters are retrieved. @@ -441,8 +471,6 @@ message ProfileQuery { repeated PersonNameFilter person_name_filters = 17; } -// Input only. -// // Geographic region of the search. message LocationFilter { // Specify whether to include telecommute jobs. @@ -457,12 +485,12 @@ message LocationFilter { TELECOMMUTE_ALLOWED = 2; } - // Optional. The address name, such as "Mountain View" or "Bay Area". + // The address name, such as "Mountain View" or "Bay Area". string address = 1; - // Optional. CLDR region code of the country/region of the address. This is - // used to address ambiguity of the user-input location, for example, - // "Liverpool" against "Liverpool, NY, US" or "Liverpool, UK". + // CLDR region code of the country/region of the address. This is used + // to address ambiguity of the user-input location, for example, "Liverpool" + // against "Liverpool, NY, US" or "Liverpool, UK". // // Set this field to bias location resolution toward a specific country // or territory. 
If this field is not set, application behavior is biased @@ -474,16 +502,16 @@ message LocationFilter { // Note that this filter is not applicable for Profile Search related queries. string region_code = 2; - // Optional. The latitude and longitude of the geographic center to search - // from. This field is ignored if `address` is provided. + // The latitude and longitude of the geographic center to search from. This + // field is ignored if `address` is provided. google.type.LatLng lat_lng = 3; - // Optional. The distance_in_miles is applied when the location being searched - // for is identified as a city or smaller. This field is ignored if the - // location being searched for is a state or larger. + // The distance_in_miles is applied when the location being searched for is + // identified as a city or smaller. This field is ignored if the location + // being searched for is a state or larger. double distance_in_miles = 4; - // Optional. Allows the client to return jobs without a + // Allows the client to return jobs without a // set location, specifically, telecommuting jobs (telecommuting is considered // by the service as a special location. // [Job.posting_region][google.cloud.talent.v4beta1.Job.posting_region] @@ -506,15 +534,13 @@ message LocationFilter { // This field is only used for job search requests. TelecommutePreference telecommute_preference = 5; - // Optional. Whether to apply negation to the filter so profiles matching the - // filter are excluded. + // Whether to apply negation to the filter so profiles matching the filter + // are excluded. // // Currently only supported in profile search. bool negated = 6; } -// Input only. -// // Filter on job compensation type and amount. message CompensationFilter { // Specify the type of filtering. @@ -564,22 +590,21 @@ message CompensationFilter { } // Required. Type of filter. - FilterType type = 1; + FilterType type = 1 [(google.api.field_behavior) = REQUIRED]; // Required. 
Specify desired `base compensation entry's` // [CompensationInfo.CompensationUnit][google.cloud.talent.v4beta1.CompensationInfo.CompensationUnit]. - repeated CompensationInfo.CompensationUnit units = 2; + repeated CompensationInfo.CompensationUnit units = 2 + [(google.api.field_behavior) = REQUIRED]; - // Optional. Compensation range. + // Compensation range. CompensationInfo.CompensationRange range = 3; - // Optional. If set to true, jobs with unspecified compensation range fields - // are included. + // If set to true, jobs with unspecified compensation range fields are + // included. bool include_jobs_with_unspecified_compensation_range = 4; } -// Input only. -// // Parameters needed for commute search. message CommuteFilter { // The traffic density to use when calculating commute time. @@ -594,69 +619,60 @@ message CommuteFilter { BUSY_HOUR = 2; } - // Required. The method of transportation for which to calculate the commute - // time. - CommuteMethod commute_method = 1; + // Required. The method of transportation to calculate the commute time for. + CommuteMethod commute_method = 1 [(google.api.field_behavior) = REQUIRED]; - // Required. The latitude and longitude of the location from which to - // calculate the commute time. - google.type.LatLng start_coordinates = 2; + // Required. The latitude and longitude of the location to calculate the + // commute time from. + google.type.LatLng start_coordinates = 2 + [(google.api.field_behavior) = REQUIRED]; // Required. The maximum travel time in seconds. The maximum allowed value is // `3600s` (one hour). Format is `123s`. - google.protobuf.Duration travel_duration = 3; - - // Optional. If `true`, jobs without street level addresses may also be - // returned. For city level addresses, the city center is used. For state and - // coarser level addresses, text matching is used. 
If this field is set to - // `false` or isn't specified, only jobs that include street level addresses - // will be returned by commute search. + google.protobuf.Duration travel_duration = 3 + [(google.api.field_behavior) = REQUIRED]; + + // If `true`, jobs without street level addresses may also be returned. + // For city level addresses, the city center is used. For state and coarser + // level addresses, text matching is used. + // If this field is set to `false` or isn't specified, only jobs that include + // street level addresses will be returned by commute search. bool allow_imprecise_addresses = 4; - // Optional. - // // Traffic factor to take into account while searching by commute. oneof traffic_option { - // Optional. Specifies the traffic density to use when calculating commute - // time. + // Specifies the traffic density to use when calculating commute time. RoadTraffic road_traffic = 5; - // Optional. The departure time used to calculate traffic impact, - // represented as [google.type.TimeOfDay][google.type.TimeOfDay] in local - // time zone. + // The departure time used to calculate traffic impact, represented as + // [google.type.TimeOfDay][google.type.TimeOfDay] in local time zone. // // Currently traffic model is restricted to hour level resolution. google.type.TimeOfDay departure_time = 6; } } -// Input only. -// // Job title of the search. message JobTitleFilter { - // Required. The job title, for example, "Software engineer", or "Product + // Required. The job title. For example, "Software engineer", or "Product // manager". - string job_title = 1; + string job_title = 1 [(google.api.field_behavior) = REQUIRED]; - // Optional. Whether to apply negation to the filter so profiles matching the - // filter are excluded. + // Whether to apply negation to the filter so profiles matching the filter + // are excluded. bool negated = 2; } -// Input only. -// // Skill filter of the search. message SkillFilter { // Required. The skill name. 
For example, "java", "j2ee", and so on. - string skill = 1; + string skill = 1 [(google.api.field_behavior) = REQUIRED]; - // Optional. Whether to apply negation to the filter so profiles matching the - // filter are excluded. + // Whether to apply negation to the filter so profiles matching the filter + // are excluded. bool negated = 2; } -// Input only. -// // Employer filter of the search. message EmployerFilter { // Enum indicating which set of @@ -680,9 +696,9 @@ message EmployerFilter { } // Required. The name of the employer, for example "Google", "Alphabet". - string employer = 1; + string employer = 1 [(google.api.field_behavior) = REQUIRED]; - // Optional. Define set of + // Define set of // [EmploymentRecord][google.cloud.talent.v4beta1.EmploymentRecord]s to search // against. // @@ -690,36 +706,31 @@ message EmployerFilter { // [EmployerFilterMode.ALL_EMPLOYMENT_RECORDS][google.cloud.talent.v4beta1.EmployerFilter.EmployerFilterMode.ALL_EMPLOYMENT_RECORDS]. EmployerFilterMode mode = 2; - // Optional. Whether to apply negation to the filter so profiles matching the - // filter is excluded. + // Whether to apply negation to the filter so profiles matching the filter + // is excluded. bool negated = 3; } -// Input only. -// // Education filter of the search. message EducationFilter { - // Optional. The school name. For example "MIT", "University of California, - // Berkeley". + // The school name. For example "MIT", "University of California, Berkeley". string school = 1; - // Optional. The field of study. This is to search against value provided in + // The field of study. This is to search against value provided in // [Degree.fields_of_study][google.cloud.talent.v4beta1.Degree.fields_of_study]. // For example "Computer Science", "Mathematics". string field_of_study = 2; - // Optional. Education degree in ISCED code. Each value in degree covers a - // specific level of education, without any expansion to upper nor lower - // levels of education degree. 
+ // Education degree in ISCED code. Each value in degree covers a specific + // level of education, without any expansion to upper nor lower levels of + // education degree. DegreeType degree_type = 3; - // Optional. Whether to apply negation to the filter so profiles matching the - // filter is excluded. + // Whether to apply negation to the filter so profiles matching the filter + // is excluded. bool negated = 6; } -// Input only. -// // Work experience filter. // // This filter is used to search for profiles with working experience length @@ -728,15 +739,13 @@ message EducationFilter { // and // [max_experience][google.cloud.talent.v4beta1.WorkExperienceFilter.max_experience]. message WorkExperienceFilter { - // Optional. The minimum duration of the work experience (inclusive). + // The minimum duration of the work experience (inclusive). google.protobuf.Duration min_experience = 1; - // Optional. The maximum duration of the work experience (exclusive). + // The maximum duration of the work experience (exclusive). google.protobuf.Duration max_experience = 2; } -// Input only. -// // Application Date Range Filter. // // The API matches profiles with @@ -747,54 +756,47 @@ message WorkExperienceFilter { // and [end_date][google.cloud.talent.v4beta1.ApplicationDateFilter.end_date] // are missing. message ApplicationDateFilter { - // Optional. Start date. If it's missing, The API matches profiles with - // application date not after the end date. + // Start date. If it's missing, The API matches profiles with application date + // not after the end date. google.type.Date start_date = 1; - // Optional. End date. If it's missing, The API matches profiles with - // application date not before the start date. + // End date. If it's missing, The API matches profiles with application date + // not before the start date. google.type.Date end_date = 2; } -// Input only. -// // Outcome Notes Filter. message ApplicationOutcomeNotesFilter { // Required. 
User entered or selected outcome reason. The API does an exact // match on the // [Application.outcome_notes][google.cloud.talent.v4beta1.Application.outcome_notes] // in profiles. - string outcome_notes = 1; + string outcome_notes = 1 [(google.api.field_behavior) = REQUIRED]; - // Optional. If true, The API excludes all candidates with any + // If true, The API excludes all candidates with any // [Application.outcome_notes][google.cloud.talent.v4beta1.Application.outcome_notes] // matching the outcome reason specified in the filter. bool negated = 2; } -// Input only. -// // Filter on the job information of Application. message ApplicationJobFilter { - // Optional. The job requisition id in the application. The API does an exact - // match on the - // [Job.requisition_id][google.cloud.talent.v4beta1.Job.requisition_id] of + // The job requisition id in the application. The API does an exact match on + // the [Job.requisition_id][google.cloud.talent.v4beta1.Job.requisition_id] of // [Application.job][google.cloud.talent.v4beta1.Application.job] in profiles. string job_requisition_id = 2; - // Optional. The job title in the application. The API does an exact match on - // the [Job.title][google.cloud.talent.v4beta1.Job.title] of + // The job title in the application. The API does an exact match on the + // [Job.title][google.cloud.talent.v4beta1.Job.title] of // [Application.job][google.cloud.talent.v4beta1.Application.job] in profiles. string job_title = 3; - // Optional. If true, the API excludes all profiles with any + // If true, the API excludes all profiles with any // [Application.job][google.cloud.talent.v4beta1.Application.job] matching the // filters. bool negated = 4; } -// Input only. -// // Filter on create timestamp or update timestamp of profiles. message TimeFilter { // Time fields can be used in TimeFilter. @@ -809,34 +811,53 @@ message TimeFilter { UPDATE_TIME = 2; } - // Optional. Start timestamp, matching profiles with the start time. 
If this - // field missing, The API matches profiles with create / update timestamp - // before the end timestamp. + // Start timestamp, matching profiles with the start time. If this field + // missing, The API matches profiles with create / update timestamp before the + // end timestamp. google.protobuf.Timestamp start_time = 1; - // Optional. End timestamp, matching profiles with the end time. If this field + // End timestamp, matching profiles with the end time. If this field // missing, The API matches profiles with create / update timestamp after the // start timestamp. google.protobuf.Timestamp end_time = 2; - // Optional. Specifies which time field to filter profiles. + // Specifies which time field to filter profiles. // // Defaults to // [TimeField.CREATE_TIME][google.cloud.talent.v4beta1.TimeFilter.TimeField.CREATE_TIME]. TimeField time_field = 3; } -// Input only +// Deprecated. Use AvailabilityFilter instead. // // Filter on availability signals. message CandidateAvailabilityFilter { - // Optional. It is false by default. If true, API excludes all the potential - // available profiles. + option deprecated = true; + + // It is false by default. If true, API excludes all the potential available + // profiles. bool negated = 1; } -// Input only. -// +// Filter on availability signals. +message AvailabilityFilter { + // Required. Type of signal to apply filter on. + AvailabilitySignalType signal_type = 1 + [(google.api.field_behavior) = REQUIRED]; + + // Required. Range of times to filter candidate signals by. 
+ TimestampRange range = 2 [(google.api.field_behavior) = REQUIRED]; + + // If multiple + // [AvailabilityFilter][google.cloud.talent.v4beta1.AvailabilityFilter] are + // provided, the default behavior is to OR all filters, but if this field is + // set to true, this particular + // [AvailabilityFilter][google.cloud.talent.v4beta1.AvailabilityFilter] will + // be AND'ed against other + // [AvailabilityFilter][google.cloud.talent.v4beta1.AvailabilityFilter]. + bool required = 3; +} + // Filter on person name. message PersonNameFilter { // Required. The person name. For example, "John Smith". @@ -845,5 +866,5 @@ message PersonNameFilter { // [PersonName.structured_name.middle_initial][], // [PersonName.structured_name.family_name][], and // [PersonName.formatted_name][google.cloud.talent.v4beta1.PersonName.formatted_name]. - string person_name = 1; + string person_name = 1 [(google.api.field_behavior) = REQUIRED]; } diff --git a/talent/google/cloud/talent_v4beta1/proto/filters_pb2.py b/talent/google/cloud/talent_v4beta1/proto/filters_pb2.py index 1b8de9175bf7..447d431def21 100644 --- a/talent/google/cloud/talent_v4beta1/proto/filters_pb2.py +++ b/talent/google/cloud/talent_v4beta1/proto/filters_pb2.py @@ -16,10 +16,12 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.cloud.talent_v4beta1.proto import ( common_pb2 as google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_common__pb2, ) from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 +from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 from google.type import date_pb2 as google_dot_type_dot_date__pb2 @@ -35,12 +37,14 @@ 
"\n\037com.google.cloud.talent.v4beta1B\014FiltersProtoP\001ZAgoogle.golang.org/genproto/googleapis/cloud/talent/v4beta1;talent\242\002\003CTS" ), serialized_pb=_b( - '\n/google/cloud/talent_v4beta1/proto/filters.proto\x12\x1bgoogle.cloud.talent.v4beta1\x1a\x1cgoogle/api/annotations.proto\x1a.google/cloud/talent_v4beta1/proto/common.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x16google/type/date.proto\x1a\x18google/type/latlng.proto\x1a\x1bgoogle/type/timeofday.proto"\xe3\x04\n\x08JobQuery\x12\r\n\x05query\x18\x01 \x01(\t\x12\x11\n\tcompanies\x18\x02 \x03(\t\x12\x45\n\x10location_filters\x18\x03 \x03(\x0b\x32+.google.cloud.talent.v4beta1.LocationFilter\x12@\n\x0ejob_categories\x18\x04 \x03(\x0e\x32(.google.cloud.talent.v4beta1.JobCategory\x12\x42\n\x0e\x63ommute_filter\x18\x05 \x01(\x0b\x32*.google.cloud.talent.v4beta1.CommuteFilter\x12\x1d\n\x15\x63ompany_display_names\x18\x06 \x03(\t\x12L\n\x13\x63ompensation_filter\x18\x07 \x01(\x0b\x32/.google.cloud.talent.v4beta1.CompensationFilter\x12\x1f\n\x17\x63ustom_attribute_filter\x18\x08 \x01(\t\x12\x1b\n\x13\x64isable_spell_check\x18\t \x01(\x08\x12\x45\n\x10\x65mployment_types\x18\n \x03(\x0e\x32+.google.cloud.talent.v4beta1.EmploymentType\x12\x16\n\x0elanguage_codes\x18\x0b \x03(\t\x12G\n\x12publish_time_range\x18\x0c \x01(\x0b\x32+.google.cloud.talent.v4beta1.TimestampRange\x12\x15\n\rexcluded_jobs\x18\r \x03(\t"\xa2\x08\n\x0cProfileQuery\x12\r\n\x05query\x18\x01 \x01(\t\x12\x45\n\x10location_filters\x18\x02 \x03(\x0b\x32+.google.cloud.talent.v4beta1.LocationFilter\x12\x46\n\x11job_title_filters\x18\x03 \x03(\x0b\x32+.google.cloud.talent.v4beta1.JobTitleFilter\x12\x45\n\x10\x65mployer_filters\x18\x04 \x03(\x0b\x32+.google.cloud.talent.v4beta1.EmployerFilter\x12G\n\x11\x65\x64ucation_filters\x18\x05 \x03(\x0b\x32,.google.cloud.talent.v4beta1.EducationFilter\x12?\n\rskill_filters\x18\x06 
\x03(\x0b\x32(.google.cloud.talent.v4beta1.SkillFilter\x12Q\n\x16work_experience_filter\x18\x07 \x03(\x0b\x32\x31.google.cloud.talent.v4beta1.WorkExperienceFilter\x12=\n\x0ctime_filters\x18\x08 \x03(\x0b\x32\'.google.cloud.talent.v4beta1.TimeFilter\x12\x32\n\x0ehirable_filter\x18\t \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12T\n\x18\x61pplication_date_filters\x18\n \x03(\x0b\x32\x32.google.cloud.talent.v4beta1.ApplicationDateFilter\x12\x65\n!application_outcome_notes_filters\x18\x0b \x03(\x0b\x32:.google.cloud.talent.v4beta1.ApplicationOutcomeNotesFilter\x12R\n\x17\x61pplication_job_filters\x18\r \x03(\x0b\x32\x31.google.cloud.talent.v4beta1.ApplicationJobFilter\x12\x1f\n\x17\x63ustom_attribute_filter\x18\x0f \x01(\t\x12_\n\x1d\x63\x61ndidate_availability_filter\x18\x10 \x01(\x0b\x32\x38.google.cloud.talent.v4beta1.CandidateAvailabilityFilter\x12J\n\x13person_name_filters\x18\x11 \x03(\x0b\x32-.google.cloud.talent.v4beta1.PersonNameFilter"\xdf\x02\n\x0eLocationFilter\x12\x0f\n\x07\x61\x64\x64ress\x18\x01 \x01(\t\x12\x13\n\x0bregion_code\x18\x02 \x01(\t\x12$\n\x07lat_lng\x18\x03 \x01(\x0b\x32\x13.google.type.LatLng\x12\x19\n\x11\x64istance_in_miles\x18\x04 \x01(\x01\x12\x61\n\x16telecommute_preference\x18\x05 \x01(\x0e\x32\x41.google.cloud.talent.v4beta1.LocationFilter.TelecommutePreference\x12\x0f\n\x07negated\x18\x06 \x01(\x08"r\n\x15TelecommutePreference\x12&\n"TELECOMMUTE_PREFERENCE_UNSPECIFIED\x10\x00\x12\x18\n\x14TELECOMMUTE_EXCLUDED\x10\x01\x12\x17\n\x13TELECOMMUTE_ALLOWED\x10\x02"\xc0\x03\n\x12\x43ompensationFilter\x12H\n\x04type\x18\x01 \x01(\x0e\x32:.google.cloud.talent.v4beta1.CompensationFilter.FilterType\x12M\n\x05units\x18\x02 \x03(\x0e\x32>.google.cloud.talent.v4beta1.CompensationInfo.CompensationUnit\x12N\n\x05range\x18\x03 \x01(\x0b\x32?.google.cloud.talent.v4beta1.CompensationInfo.CompensationRange\x12\x38\n0include_jobs_with_unspecified_compensation_range\x18\x04 
\x01(\x08"\x86\x01\n\nFilterType\x12\x1b\n\x17\x46ILTER_TYPE_UNSPECIFIED\x10\x00\x12\r\n\tUNIT_ONLY\x10\x01\x12\x13\n\x0fUNIT_AND_AMOUNT\x10\x02\x12\x1a\n\x16\x41NNUALIZED_BASE_AMOUNT\x10\x03\x12\x1b\n\x17\x41NNUALIZED_TOTAL_AMOUNT\x10\x04"\xbc\x03\n\rCommuteFilter\x12\x42\n\x0e\x63ommute_method\x18\x01 \x01(\x0e\x32*.google.cloud.talent.v4beta1.CommuteMethod\x12.\n\x11start_coordinates\x18\x02 \x01(\x0b\x32\x13.google.type.LatLng\x12\x32\n\x0ftravel_duration\x18\x03 \x01(\x0b\x32\x19.google.protobuf.Duration\x12!\n\x19\x61llow_imprecise_addresses\x18\x04 \x01(\x08\x12N\n\x0croad_traffic\x18\x05 \x01(\x0e\x32\x36.google.cloud.talent.v4beta1.CommuteFilter.RoadTrafficH\x00\x12\x30\n\x0e\x64\x65parture_time\x18\x06 \x01(\x0b\x32\x16.google.type.TimeOfDayH\x00"L\n\x0bRoadTraffic\x12\x1c\n\x18ROAD_TRAFFIC_UNSPECIFIED\x10\x00\x12\x10\n\x0cTRAFFIC_FREE\x10\x01\x12\r\n\tBUSY_HOUR\x10\x02\x42\x10\n\x0etraffic_option"4\n\x0eJobTitleFilter\x12\x11\n\tjob_title\x18\x01 \x01(\t\x12\x0f\n\x07negated\x18\x02 \x01(\x08"-\n\x0bSkillFilter\x12\r\n\x05skill\x18\x01 \x01(\t\x12\x0f\n\x07negated\x18\x02 \x01(\x08"\xa1\x02\n\x0e\x45mployerFilter\x12\x10\n\x08\x65mployer\x18\x01 \x01(\t\x12L\n\x04mode\x18\x02 \x01(\x0e\x32>.google.cloud.talent.v4beta1.EmployerFilter.EmployerFilterMode\x12\x0f\n\x07negated\x18\x03 \x01(\x08"\x9d\x01\n\x12\x45mployerFilterMode\x12$\n EMPLOYER_FILTER_MODE_UNSPECIFIED\x10\x00\x12\x1a\n\x16\x41LL_EMPLOYMENT_RECORDS\x10\x01\x12#\n\x1f\x43URRENT_EMPLOYMENT_RECORDS_ONLY\x10\x02\x12 \n\x1cPAST_EMPLOYMENT_RECORDS_ONLY\x10\x03"\x88\x01\n\x0f\x45\x64ucationFilter\x12\x0e\n\x06school\x18\x01 \x01(\t\x12\x16\n\x0e\x66ield_of_study\x18\x02 \x01(\t\x12<\n\x0b\x64\x65gree_type\x18\x03 \x01(\x0e\x32\'.google.cloud.talent.v4beta1.DegreeType\x12\x0f\n\x07negated\x18\x06 \x01(\x08"|\n\x14WorkExperienceFilter\x12\x31\n\x0emin_experience\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x31\n\x0emax_experience\x18\x02 
\x01(\x0b\x32\x19.google.protobuf.Duration"c\n\x15\x41pplicationDateFilter\x12%\n\nstart_date\x18\x01 \x01(\x0b\x32\x11.google.type.Date\x12#\n\x08\x65nd_date\x18\x02 \x01(\x0b\x32\x11.google.type.Date"G\n\x1d\x41pplicationOutcomeNotesFilter\x12\x15\n\routcome_notes\x18\x01 \x01(\t\x12\x0f\n\x07negated\x18\x02 \x01(\x08"V\n\x14\x41pplicationJobFilter\x12\x1a\n\x12job_requisition_id\x18\x02 \x01(\t\x12\x11\n\tjob_title\x18\x03 \x01(\t\x12\x0f\n\x07negated\x18\x04 \x01(\x08"\xfc\x01\n\nTimeFilter\x12.\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x45\n\ntime_field\x18\x03 \x01(\x0e\x32\x31.google.cloud.talent.v4beta1.TimeFilter.TimeField"I\n\tTimeField\x12\x1a\n\x16TIME_FIELD_UNSPECIFIED\x10\x00\x12\x0f\n\x0b\x43REATE_TIME\x10\x01\x12\x0f\n\x0bUPDATE_TIME\x10\x02".\n\x1b\x43\x61ndidateAvailabilityFilter\x12\x0f\n\x07negated\x18\x01 \x01(\x08"\'\n\x10PersonNameFilter\x12\x13\n\x0bperson_name\x18\x01 \x01(\tBz\n\x1f\x63om.google.cloud.talent.v4beta1B\x0c\x46iltersProtoP\x01ZAgoogle.golang.org/genproto/googleapis/cloud/talent/v4beta1;talent\xa2\x02\x03\x43TSb\x06proto3' + '\n/google/cloud/talent_v4beta1/proto/filters.proto\x12\x1bgoogle.cloud.talent.v4beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a.google/cloud/talent_v4beta1/proto/common.proto\x1a\x1egoogle/protobuf/duration.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x16google/type/date.proto\x1a\x18google/type/latlng.proto\x1a\x1bgoogle/type/timeofday.proto"\x80\x05\n\x08JobQuery\x12\r\n\x05query\x18\x01 \x01(\t\x12\x1b\n\x13query_language_code\x18\x0e \x01(\t\x12\x11\n\tcompanies\x18\x02 \x03(\t\x12\x45\n\x10location_filters\x18\x03 \x03(\x0b\x32+.google.cloud.talent.v4beta1.LocationFilter\x12@\n\x0ejob_categories\x18\x04 
\x03(\x0e\x32(.google.cloud.talent.v4beta1.JobCategory\x12\x42\n\x0e\x63ommute_filter\x18\x05 \x01(\x0b\x32*.google.cloud.talent.v4beta1.CommuteFilter\x12\x1d\n\x15\x63ompany_display_names\x18\x06 \x03(\t\x12L\n\x13\x63ompensation_filter\x18\x07 \x01(\x0b\x32/.google.cloud.talent.v4beta1.CompensationFilter\x12\x1f\n\x17\x63ustom_attribute_filter\x18\x08 \x01(\t\x12\x1b\n\x13\x64isable_spell_check\x18\t \x01(\x08\x12\x45\n\x10\x65mployment_types\x18\n \x03(\x0e\x32+.google.cloud.talent.v4beta1.EmploymentType\x12\x16\n\x0elanguage_codes\x18\x0b \x03(\t\x12G\n\x12publish_time_range\x18\x0c \x01(\x0b\x32+.google.cloud.talent.v4beta1.TimestampRange\x12\x15\n\rexcluded_jobs\x18\r \x03(\t"\xf5\x08\n\x0cProfileQuery\x12\r\n\x05query\x18\x01 \x01(\t\x12\x45\n\x10location_filters\x18\x02 \x03(\x0b\x32+.google.cloud.talent.v4beta1.LocationFilter\x12\x46\n\x11job_title_filters\x18\x03 \x03(\x0b\x32+.google.cloud.talent.v4beta1.JobTitleFilter\x12\x45\n\x10\x65mployer_filters\x18\x04 \x03(\x0b\x32+.google.cloud.talent.v4beta1.EmployerFilter\x12G\n\x11\x65\x64ucation_filters\x18\x05 \x03(\x0b\x32,.google.cloud.talent.v4beta1.EducationFilter\x12?\n\rskill_filters\x18\x06 \x03(\x0b\x32(.google.cloud.talent.v4beta1.SkillFilter\x12Q\n\x16work_experience_filter\x18\x07 \x03(\x0b\x32\x31.google.cloud.talent.v4beta1.WorkExperienceFilter\x12=\n\x0ctime_filters\x18\x08 \x03(\x0b\x32\'.google.cloud.talent.v4beta1.TimeFilter\x12\x32\n\x0ehirable_filter\x18\t \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12T\n\x18\x61pplication_date_filters\x18\n \x03(\x0b\x32\x32.google.cloud.talent.v4beta1.ApplicationDateFilter\x12\x65\n!application_outcome_notes_filters\x18\x0b \x03(\x0b\x32:.google.cloud.talent.v4beta1.ApplicationOutcomeNotesFilter\x12R\n\x17\x61pplication_job_filters\x18\r \x03(\x0b\x32\x31.google.cloud.talent.v4beta1.ApplicationJobFilter\x12\x1f\n\x17\x63ustom_attribute_filter\x18\x0f \x01(\t\x12\x63\n\x1d\x63\x61ndidate_availability_filter\x18\x10 
\x01(\x0b\x32\x38.google.cloud.talent.v4beta1.CandidateAvailabilityFilterB\x02\x18\x01\x12M\n\x14\x61vailability_filters\x18\x12 \x03(\x0b\x32/.google.cloud.talent.v4beta1.AvailabilityFilter\x12J\n\x13person_name_filters\x18\x11 \x03(\x0b\x32-.google.cloud.talent.v4beta1.PersonNameFilter"\xdf\x02\n\x0eLocationFilter\x12\x0f\n\x07\x61\x64\x64ress\x18\x01 \x01(\t\x12\x13\n\x0bregion_code\x18\x02 \x01(\t\x12$\n\x07lat_lng\x18\x03 \x01(\x0b\x32\x13.google.type.LatLng\x12\x19\n\x11\x64istance_in_miles\x18\x04 \x01(\x01\x12\x61\n\x16telecommute_preference\x18\x05 \x01(\x0e\x32\x41.google.cloud.talent.v4beta1.LocationFilter.TelecommutePreference\x12\x0f\n\x07negated\x18\x06 \x01(\x08"r\n\x15TelecommutePreference\x12&\n"TELECOMMUTE_PREFERENCE_UNSPECIFIED\x10\x00\x12\x18\n\x14TELECOMMUTE_EXCLUDED\x10\x01\x12\x17\n\x13TELECOMMUTE_ALLOWED\x10\x02"\xca\x03\n\x12\x43ompensationFilter\x12M\n\x04type\x18\x01 \x01(\x0e\x32:.google.cloud.talent.v4beta1.CompensationFilter.FilterTypeB\x03\xe0\x41\x02\x12R\n\x05units\x18\x02 \x03(\x0e\x32>.google.cloud.talent.v4beta1.CompensationInfo.CompensationUnitB\x03\xe0\x41\x02\x12N\n\x05range\x18\x03 \x01(\x0b\x32?.google.cloud.talent.v4beta1.CompensationInfo.CompensationRange\x12\x38\n0include_jobs_with_unspecified_compensation_range\x18\x04 \x01(\x08"\x86\x01\n\nFilterType\x12\x1b\n\x17\x46ILTER_TYPE_UNSPECIFIED\x10\x00\x12\r\n\tUNIT_ONLY\x10\x01\x12\x13\n\x0fUNIT_AND_AMOUNT\x10\x02\x12\x1a\n\x16\x41NNUALIZED_BASE_AMOUNT\x10\x03\x12\x1b\n\x17\x41NNUALIZED_TOTAL_AMOUNT\x10\x04"\xcb\x03\n\rCommuteFilter\x12G\n\x0e\x63ommute_method\x18\x01 \x01(\x0e\x32*.google.cloud.talent.v4beta1.CommuteMethodB\x03\xe0\x41\x02\x12\x33\n\x11start_coordinates\x18\x02 \x01(\x0b\x32\x13.google.type.LatLngB\x03\xe0\x41\x02\x12\x37\n\x0ftravel_duration\x18\x03 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x02\x12!\n\x19\x61llow_imprecise_addresses\x18\x04 \x01(\x08\x12N\n\x0croad_traffic\x18\x05 
\x01(\x0e\x32\x36.google.cloud.talent.v4beta1.CommuteFilter.RoadTrafficH\x00\x12\x30\n\x0e\x64\x65parture_time\x18\x06 \x01(\x0b\x32\x16.google.type.TimeOfDayH\x00"L\n\x0bRoadTraffic\x12\x1c\n\x18ROAD_TRAFFIC_UNSPECIFIED\x10\x00\x12\x10\n\x0cTRAFFIC_FREE\x10\x01\x12\r\n\tBUSY_HOUR\x10\x02\x42\x10\n\x0etraffic_option"9\n\x0eJobTitleFilter\x12\x16\n\tjob_title\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x0f\n\x07negated\x18\x02 \x01(\x08"2\n\x0bSkillFilter\x12\x12\n\x05skill\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x0f\n\x07negated\x18\x02 \x01(\x08"\xa6\x02\n\x0e\x45mployerFilter\x12\x15\n\x08\x65mployer\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12L\n\x04mode\x18\x02 \x01(\x0e\x32>.google.cloud.talent.v4beta1.EmployerFilter.EmployerFilterMode\x12\x0f\n\x07negated\x18\x03 \x01(\x08"\x9d\x01\n\x12\x45mployerFilterMode\x12$\n EMPLOYER_FILTER_MODE_UNSPECIFIED\x10\x00\x12\x1a\n\x16\x41LL_EMPLOYMENT_RECORDS\x10\x01\x12#\n\x1f\x43URRENT_EMPLOYMENT_RECORDS_ONLY\x10\x02\x12 \n\x1cPAST_EMPLOYMENT_RECORDS_ONLY\x10\x03"\x88\x01\n\x0f\x45\x64ucationFilter\x12\x0e\n\x06school\x18\x01 \x01(\t\x12\x16\n\x0e\x66ield_of_study\x18\x02 \x01(\t\x12<\n\x0b\x64\x65gree_type\x18\x03 \x01(\x0e\x32\'.google.cloud.talent.v4beta1.DegreeType\x12\x0f\n\x07negated\x18\x06 \x01(\x08"|\n\x14WorkExperienceFilter\x12\x31\n\x0emin_experience\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x31\n\x0emax_experience\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration"c\n\x15\x41pplicationDateFilter\x12%\n\nstart_date\x18\x01 \x01(\x0b\x32\x11.google.type.Date\x12#\n\x08\x65nd_date\x18\x02 \x01(\x0b\x32\x11.google.type.Date"L\n\x1d\x41pplicationOutcomeNotesFilter\x12\x1a\n\routcome_notes\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x0f\n\x07negated\x18\x02 \x01(\x08"V\n\x14\x41pplicationJobFilter\x12\x1a\n\x12job_requisition_id\x18\x02 \x01(\t\x12\x11\n\tjob_title\x18\x03 \x01(\t\x12\x0f\n\x07negated\x18\x04 \x01(\x08"\xfc\x01\n\nTimeFilter\x12.\n\nstart_time\x18\x01 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x45\n\ntime_field\x18\x03 \x01(\x0e\x32\x31.google.cloud.talent.v4beta1.TimeFilter.TimeField"I\n\tTimeField\x12\x1a\n\x16TIME_FIELD_UNSPECIFIED\x10\x00\x12\x0f\n\x0b\x43REATE_TIME\x10\x01\x12\x0f\n\x0bUPDATE_TIME\x10\x02"2\n\x1b\x43\x61ndidateAvailabilityFilter\x12\x0f\n\x07negated\x18\x01 \x01(\x08:\x02\x18\x01"\xb6\x01\n\x12\x41vailabilityFilter\x12M\n\x0bsignal_type\x18\x01 \x01(\x0e\x32\x33.google.cloud.talent.v4beta1.AvailabilitySignalTypeB\x03\xe0\x41\x02\x12?\n\x05range\x18\x02 \x01(\x0b\x32+.google.cloud.talent.v4beta1.TimestampRangeB\x03\xe0\x41\x02\x12\x10\n\x08required\x18\x03 \x01(\x08",\n\x10PersonNameFilter\x12\x18\n\x0bperson_name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x42z\n\x1f\x63om.google.cloud.talent.v4beta1B\x0c\x46iltersProtoP\x01ZAgoogle.golang.org/genproto/googleapis/cloud/talent/v4beta1;talent\xa2\x02\x03\x43TSb\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_common__pb2.DESCRIPTOR, google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, + google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, google_dot_protobuf_dot_wrappers__pb2.DESCRIPTOR, google_dot_type_dot_date__pb2.DESCRIPTOR, @@ -80,8 +84,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=2247, - serialized_end=2361, + serialized_start=2426, + serialized_end=2540, ) _sym_db.RegisterEnumDescriptor(_LOCATIONFILTER_TELECOMMUTEPREFERENCE) @@ -125,8 +129,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=2678, - serialized_end=2812, + serialized_start=2867, + serialized_end=3001, ) _sym_db.RegisterEnumDescriptor(_COMPENSATIONFILTER_FILTERTYPE) @@ -152,8 +156,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=3165, - 
serialized_end=3241, + serialized_start=3369, + serialized_end=3445, ) _sym_db.RegisterEnumDescriptor(_COMMUTEFILTER_ROADTRAFFIC) @@ -194,8 +198,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=3495, - serialized_end=3652, + serialized_start=3714, + serialized_end=3871, ) _sym_db.RegisterEnumDescriptor(_EMPLOYERFILTER_EMPLOYERFILTERMODE) @@ -221,8 +225,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=4361, - serialized_end=4434, + serialized_start=4585, + serialized_end=4658, ) _sym_db.RegisterEnumDescriptor(_TIMEFILTER_TIMEFIELD) @@ -252,10 +256,28 @@ serialized_options=None, file=DESCRIPTOR, ), + _descriptor.FieldDescriptor( + name="query_language_code", + full_name="google.cloud.talent.v4beta1.JobQuery.query_language_code", + index=1, + number=14, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), _descriptor.FieldDescriptor( name="companies", full_name="google.cloud.talent.v4beta1.JobQuery.companies", - index=1, + index=2, number=2, type=9, cpp_type=9, @@ -273,7 +295,7 @@ _descriptor.FieldDescriptor( name="location_filters", full_name="google.cloud.talent.v4beta1.JobQuery.location_filters", - index=2, + index=3, number=3, type=11, cpp_type=10, @@ -291,7 +313,7 @@ _descriptor.FieldDescriptor( name="job_categories", full_name="google.cloud.talent.v4beta1.JobQuery.job_categories", - index=3, + index=4, number=4, type=14, cpp_type=8, @@ -309,7 +331,7 @@ _descriptor.FieldDescriptor( name="commute_filter", full_name="google.cloud.talent.v4beta1.JobQuery.commute_filter", - index=4, + index=5, number=5, type=11, cpp_type=10, @@ -327,7 +349,7 @@ _descriptor.FieldDescriptor( name="company_display_names", full_name="google.cloud.talent.v4beta1.JobQuery.company_display_names", - index=5, + index=6, number=6, type=9, 
cpp_type=9, @@ -345,7 +367,7 @@ _descriptor.FieldDescriptor( name="compensation_filter", full_name="google.cloud.talent.v4beta1.JobQuery.compensation_filter", - index=6, + index=7, number=7, type=11, cpp_type=10, @@ -363,7 +385,7 @@ _descriptor.FieldDescriptor( name="custom_attribute_filter", full_name="google.cloud.talent.v4beta1.JobQuery.custom_attribute_filter", - index=7, + index=8, number=8, type=9, cpp_type=9, @@ -381,7 +403,7 @@ _descriptor.FieldDescriptor( name="disable_spell_check", full_name="google.cloud.talent.v4beta1.JobQuery.disable_spell_check", - index=8, + index=9, number=9, type=8, cpp_type=7, @@ -399,7 +421,7 @@ _descriptor.FieldDescriptor( name="employment_types", full_name="google.cloud.talent.v4beta1.JobQuery.employment_types", - index=9, + index=10, number=10, type=14, cpp_type=8, @@ -417,7 +439,7 @@ _descriptor.FieldDescriptor( name="language_codes", full_name="google.cloud.talent.v4beta1.JobQuery.language_codes", - index=10, + index=11, number=11, type=9, cpp_type=9, @@ -435,7 +457,7 @@ _descriptor.FieldDescriptor( name="publish_time_range", full_name="google.cloud.talent.v4beta1.JobQuery.publish_time_range", - index=11, + index=12, number=12, type=11, cpp_type=10, @@ -453,7 +475,7 @@ _descriptor.FieldDescriptor( name="excluded_jobs", full_name="google.cloud.talent.v4beta1.JobQuery.excluded_jobs", - index=12, + index=13, number=13, type=9, cpp_type=9, @@ -477,8 +499,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=335, - serialized_end=946, + serialized_start=402, + serialized_end=1042, ) @@ -738,13 +760,31 @@ containing_type=None, is_extension=False, extension_scope=None, + serialized_options=_b("\030\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="availability_filters", + full_name="google.cloud.talent.v4beta1.ProfileQuery.availability_filters", + index=14, + number=18, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + 
containing_type=None, + is_extension=False, + extension_scope=None, serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( name="person_name_filters", full_name="google.cloud.talent.v4beta1.ProfileQuery.person_name_filters", - index=14, + index=15, number=17, type=11, cpp_type=10, @@ -768,8 +808,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=949, - serialized_end=2007, + serialized_start=1045, + serialized_end=2186, ) @@ -897,8 +937,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2010, - serialized_end=2361, + serialized_start=2189, + serialized_end=2540, ) @@ -924,7 +964,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -942,7 +982,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -990,8 +1030,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2364, - serialized_end=2812, + serialized_start=2543, + serialized_end=3001, ) @@ -1017,7 +1057,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1035,7 +1075,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1053,7 +1093,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1127,8 +1167,8 @@ fields=[], ) ], - serialized_start=2815, - serialized_end=3259, + serialized_start=3004, + serialized_end=3463, ) @@ -1154,7 +1194,7 @@ containing_type=None, is_extension=False, 
extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1184,8 +1224,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3261, - serialized_end=3313, + serialized_start=3465, + serialized_end=3522, ) @@ -1211,7 +1251,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1241,8 +1281,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3315, - serialized_end=3360, + serialized_start=3524, + serialized_end=3574, ) @@ -1268,7 +1308,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1316,8 +1356,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3363, - serialized_end=3652, + serialized_start=3577, + serialized_end=3871, ) @@ -1409,8 +1449,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3655, - serialized_end=3791, + serialized_start=3874, + serialized_end=4010, ) @@ -1466,8 +1506,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3793, - serialized_end=3917, + serialized_start=4012, + serialized_end=4136, ) @@ -1523,8 +1563,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3919, - serialized_end=4018, + serialized_start=4138, + serialized_end=4237, ) @@ -1550,7 +1590,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1580,8 +1620,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4020, - serialized_end=4091, + serialized_start=4239, + serialized_end=4315, ) @@ -1655,8 +1695,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - 
serialized_start=4093, - serialized_end=4179, + serialized_start=4317, + serialized_end=4403, ) @@ -1730,8 +1770,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4182, - serialized_end=4434, + serialized_start=4406, + serialized_end=4658, ) @@ -1764,13 +1804,88 @@ extensions=[], nested_types=[], enum_types=[], + serialized_options=_b("\030\001"), + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=4660, + serialized_end=4710, +) + + +_AVAILABILITYFILTER = _descriptor.Descriptor( + name="AvailabilityFilter", + full_name="google.cloud.talent.v4beta1.AvailabilityFilter", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="signal_type", + full_name="google.cloud.talent.v4beta1.AvailabilityFilter.signal_type", + index=0, + number=1, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="range", + full_name="google.cloud.talent.v4beta1.AvailabilityFilter.range", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="required", + full_name="google.cloud.talent.v4beta1.AvailabilityFilter.required", + index=2, + number=3, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], serialized_options=None, 
is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4436, - serialized_end=4482, + serialized_start=4713, + serialized_end=4895, ) @@ -1796,7 +1911,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ) ], @@ -1808,8 +1923,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4484, - serialized_end=4523, + serialized_start=4897, + serialized_end=4941, ) _JOBQUERY.fields_by_name["location_filters"].message_type = _LOCATIONFILTER @@ -1854,6 +1969,7 @@ _PROFILEQUERY.fields_by_name[ "candidate_availability_filter" ].message_type = _CANDIDATEAVAILABILITYFILTER +_PROFILEQUERY.fields_by_name["availability_filters"].message_type = _AVAILABILITYFILTER _PROFILEQUERY.fields_by_name["person_name_filters"].message_type = _PERSONNAMEFILTER _LOCATIONFILTER.fields_by_name[ "lat_lng" @@ -1927,6 +2043,16 @@ ].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP _TIMEFILTER.fields_by_name["time_field"].enum_type = _TIMEFILTER_TIMEFIELD _TIMEFILTER_TIMEFIELD.containing_type = _TIMEFILTER +_AVAILABILITYFILTER.fields_by_name[ + "signal_type" +].enum_type = ( + google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_common__pb2._AVAILABILITYSIGNALTYPE +) +_AVAILABILITYFILTER.fields_by_name[ + "range" +].message_type = ( + google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_common__pb2._TIMESTAMPRANGE +) DESCRIPTOR.message_types_by_name["JobQuery"] = _JOBQUERY DESCRIPTOR.message_types_by_name["ProfileQuery"] = _PROFILEQUERY DESCRIPTOR.message_types_by_name["LocationFilter"] = _LOCATIONFILTER @@ -1946,6 +2072,7 @@ DESCRIPTOR.message_types_by_name[ "CandidateAvailabilityFilter" ] = _CANDIDATEAVAILABILITYFILTER +DESCRIPTOR.message_types_by_name["AvailabilityFilter"] = _AVAILABILITYFILTER DESCRIPTOR.message_types_by_name["PersonNameFilter"] = _PERSONNAMEFILTER _sym_db.RegisterFileDescriptor(DESCRIPTOR) @@ -1955,30 +2082,36 @@ dict( 
DESCRIPTOR=_JOBQUERY, __module__="google.cloud.talent_v4beta1.proto.filters_pb2", - __doc__="""Input only. - - The query required to perform a search query. + __doc__="""The query required to perform a search query. Attributes: query: - Optional. The query string that matches against the job title, + The query string that matches against the job title, description, and location fields. The maximum number of allowed characters is 255. + query_language_code: + The language code of + [query][google.cloud.talent.v4beta1.JobQuery.query]. For + example, "en-US". This field helps to better interpret the + query. If a value isn't specified, the query language code is + automatically detected, which may not be accurate. Language + code should be in BCP-47 format, such as "en-US" or "sr-Latn". + For more information, see `Tags for Identifying Languages + `__. companies: - Optional. This filter specifies the company entities to search - against. If a value isn't specified, jobs are searched for - against all companies. If multiple values are specified, jobs - are searched against the companies specified. The format is " - projects/{project\_id}/tenants/{tenant\_id}/companies/{company - \_id}", for example, "projects/api-test- - project/tenants/foo/companies/bar". Tenant id is optional and - the default tenant is used if unspecified, for example, - "projects/api-test-project/companies/bar". At most 20 company + This filter specifies the company entities to search against. + If a value isn't specified, jobs are searched for against all + companies. If multiple values are specified, jobs are + searched against the companies specified. The format is "proj + ects/{project\_id}/tenants/{tenant\_id}/companies/{company\_id + }". For example, "projects/foo/tenants/bar/companies/baz". If + tenant id is unspecified, the default tenant is used. For + example, "projects/foo/companies/bar". At most 20 company filters are allowed. location_filters: - Optional. 
The location filter specifies geo-regions containing - the jobs to search against. See + The location filter specifies geo-regions containing the jobs + to search against. See [LocationFilter][google.cloud.talent.v4beta1.LocationFilter] for more information. If a location value isn't specified, jobs fitting the other search criteria are retrieved @@ -1990,83 +2123,80 @@ distance is used for all locations. At most 5 location filters are allowed. job_categories: - Optional. The category filter specifies the categories of jobs - to search against. See + The category filter specifies the categories of jobs to search + against. See [JobCategory][google.cloud.talent.v4beta1.JobCategory] for more information. If a value isn't specified, jobs from any category are searched against. If multiple values are specified, jobs from any of the specified categories are searched against. commute_filter: - Optional. Allows filtering jobs by commute time with different - travel methods (for example, driving or public transit). - Note: This only works when you specify a + Allows filtering jobs by commute time with different travel + methods (for example, driving or public transit). Note: This + only works when you specify a [CommuteMethod][google.cloud.talent.v4beta1.CommuteMethod]. In this case, [location\_filters][google.cloud.talent.v4beta1.Job Query.location\_filters] is ignored. Currently we don't support sorting by commute time. company_display_names: - Optional. This filter specifies the exact company [Company.dis - play\_name][google.cloud.talent.v4beta1.Company.display\_name] - of the jobs to search against. If a value isn't specified, - jobs within the search results are associated with any - company. If multiple values are specified, jobs within the - search results may be associated with any of the specified - companies. At most 20 company display name filters are - allowed. 
+ This filter specifies the exact company [Company.display\_name + ][google.cloud.talent.v4beta1.Company.display\_name] of the + jobs to search against. If a value isn't specified, jobs + within the search results are associated with any company. If + multiple values are specified, jobs within the search results + may be associated with any of the specified companies. At + most 20 company display name filters are allowed. compensation_filter: - Optional. This search filter is applied only to [Job.compensat - ion\_info][google.cloud.talent.v4beta1.Job.compensation\_info] - . For example, if the filter is specified as "Hourly job with - per-hour compensation > $15", only jobs meeting these criteria - are searched. If a filter isn't defined, all open jobs are + This search filter is applied only to [Job.compensation\_info] + [google.cloud.talent.v4beta1.Job.compensation\_info]. For + example, if the filter is specified as "Hourly job with per- + hour compensation > $15", only jobs meeting these criteria are + searched. If a filter isn't defined, all open jobs are searched. custom_attribute_filter: - Optional. This filter specifies a structured syntax to match - against the [Job.custom\_attributes][google.cloud.talent.v4bet - a1.Job.custom\_attributes] marked as ``filterable``. The - syntax for this expression is a subset of SQL syntax. - Supported operators are: ``=``, ``!=``, ``<``, ``<=``, ``>``, - and ``>=`` where the left of the operator is a custom field - key and the right of the operator is a number or a quoted - string. You must escape backslash (\\) and quote (") - characters. Supported functions are ``LOWER([field_name])`` - to perform a case insensitive match and - ``EMPTY([field_name])`` to filter on the existence of a key. - Boolean expressions (AND/OR/NOT) are supported up to 3 levels - of nesting (for example, "((A AND B AND C) OR NOT D) AND E"), - a maximum of 100 comparisons or functions are allowed in the - expression. 
The expression must be < 6000 bytes in length. - Sample Query: ``(LOWER(driving_license)="class \"a\"" OR - EMPTY(driving_license)) AND driving_years > 10`` + This filter specifies a structured syntax to match against the + [Job.custom\_attributes][google.cloud.talent.v4beta1.Job.custo + m\_attributes] marked as ``filterable``. The syntax for this + expression is a subset of SQL syntax. Supported operators + are: ``=``, ``!=``, ``<``, ``<=``, ``>``, and ``>=`` where the + left of the operator is a custom field key and the right of + the operator is a number or a quoted string. You must escape + backslash (\\) and quote (") characters. Supported functions + are ``LOWER([field_name])`` to perform a case insensitive + match and ``EMPTY([field_name])`` to filter on the existence + of a key. Boolean expressions (AND/OR/NOT) are supported up + to 3 levels of nesting (for example, "((A AND B AND C) OR NOT + D) AND E"), a maximum of 100 comparisons or functions are + allowed in the expression. The expression must be < 6000 bytes + in length. Sample Query: ``(LOWER(driving_license)="class + \"a\"" OR EMPTY(driving_license)) AND driving_years > 10`` disable_spell_check: - Optional. This flag controls the spell-check feature. If - false, the service attempts to correct a misspelled query, for - example, "enginee" is corrected to "engineer". Defaults to - false: a spell check is performed. + This flag controls the spell-check feature. If false, the + service attempts to correct a misspelled query, for example, + "enginee" is corrected to "engineer". Defaults to false: a + spell check is performed. employment_types: - Optional. The employment type filter specifies the employment - type of jobs to search against, such as [EmploymentType.FULL\_ - TIME][google.cloud.talent.v4beta1.EmploymentType.FULL\_TIME]. - If a value isn't specified, jobs in the search results - includes any employment type. 
If multiple values are - specified, jobs in the search results include any of the - specified employment types. + The employment type filter specifies the employment type of + jobs to search against, such as [EmploymentType.FULL\_TIME][go + ogle.cloud.talent.v4beta1.EmploymentType.FULL\_TIME]. If a + value isn't specified, jobs in the search results includes any + employment type. If multiple values are specified, jobs in + the search results include any of the specified employment + types. language_codes: - Optional. This filter specifies the locale of jobs to search - against, for example, "en-US". If a value isn't specified, - the search results can contain jobs in any locale. Language - codes should be in BCP-47 format, such as "en-US" or "sr- - Latn". For more information, see `Tags for Identifying - Languages `__. At most 10 - language code filters are allowed. + This filter specifies the locale of jobs to search against, + for example, "en-US". If a value isn't specified, the search + results can contain jobs in any locale. Language codes should + be in BCP-47 format, such as "en-US" or "sr-Latn". For more + information, see `Tags for Identifying Languages + `__. At most 10 language + code filters are allowed. publish_time_range: - Optional. Jobs published within a range specified by this - filter are searched against. + Jobs published within a range specified by this filter are + searched against. excluded_jobs: - Optional. This filter specifies a list of job names to be - excluded during search. At most 400 excluded job names are - allowed. + This filter specifies a list of job names to be excluded + during search. At most 400 excluded job names are allowed. """, # @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.JobQuery) ), @@ -2084,38 +2214,38 @@ Attributes: query: - Optional. Keywords to match any text fields of profiles. For - example, "software engineer in Palo Alto". + Keywords to match any text fields of profiles. 
For example, + "software engineer in Palo Alto". location_filters: - Optional. The location filter specifies geo-regions containing - the profiles to search against. One of [LocationFilter.addres - s][google.cloud.talent.v4beta1.LocationFilter.address] or [Loc - ationFilter.lat\_lng][google.cloud.talent.v4beta1.LocationFilt - er.lat\_lng] must be provided or an error is thrown. If both [ - LocationFilter.address][google.cloud.talent.v4beta1.LocationFi - lter.address] and [LocationFilter.lat\_lng][google.cloud.talen - t.v4beta1.LocationFilter.lat\_lng] are provided, an error is + The location filter specifies geo-regions containing the + profiles to search against. One of [LocationFilter.address][g + oogle.cloud.talent.v4beta1.LocationFilter.address] or [Locatio + nFilter.lat\_lng][google.cloud.talent.v4beta1.LocationFilter.l + at\_lng] must be provided or an error is thrown. If both [Loca + tionFilter.address][google.cloud.talent.v4beta1.LocationFilter + .address] and [LocationFilter.lat\_lng][google.cloud.talent.v4 + beta1.LocationFilter.lat\_lng] are provided, an error is thrown. The following logic is used to determine which - locations in the profile to filter against: 1. All of the - profile's geocoded [Profile.addresses][google.cloud.talent.v4b - eta1.Profile.addresses] where + locations in the profile to filter against: 1. All of the + profile's geocoded [Profile.addresses][google.cloud.talent. + v4beta1.Profile.addresses] where [Address.usage][google.cloud.talent.v4beta1.Address.usage] is PERSONAL and [Address.current][google.cloud.talent.v4beta1.Address.current] - is true. 2. If the above set of locations is empty, all of the - profile's geocoded [Profile.addresses][google.cloud.talent.v4b - eta1.Profile.addresses] where + is true. 2. 
If the above set of locations is empty, all of + the profile's geocoded [Profile.addresses][google.cloud.tal + ent.v4beta1.Profile.addresses] where [Address.usage][google.cloud.talent.v4beta1.Address.usage] is CONTACT\_INFO\_USAGE\_UNSPECIFIED and [Address.current][google.cloud.talent.v4beta1.Address.current] - is true. 3. If the above set of locations is empty, all of the - profile's geocoded [Profile.addresses][google.cloud.talent.v4b - eta1.Profile.addresses] where + is true. 3. If the above set of locations is empty, all of + the profile's geocoded [Profile.addresses][google.cloud.tal + ent.v4beta1.Profile.addresses] where [Address.usage][google.cloud.talent.v4beta1.Address.usage] is PERSONAL or CONTACT\_INFO\_USAGE\_UNSPECIFIED and [Address.current][google.cloud.talent.v4beta1.Address.current] - is not set. This means that any profiles without any [Profile - .addresses][google.cloud.talent.v4beta1.Profile.addresses] + is not set. This means that any profiles without any [Prof + ile.addresses][google.cloud.talent.v4beta1.Profile.addresses] that match any of the above criteria will not be included in a search with location filter. Furthermore, any [Profile.address es][google.cloud.talent.v4beta1.Profile.addresses] where @@ -2178,105 +2308,120 @@ \_in\_miles][google.cloud.talent.v4beta1.LocationFilter.distan ce\_in\_miles] is negative, an error is thrown. job_title_filters: - Optional. Job title filter specifies job titles of profiles to - match on. If a job title isn't specified, profiles with any - titles are retrieved. If multiple values are specified, - profiles are retrieved with any of the specified job titles. - If [JobTitleFilter.negated][google.cloud.talent.v4beta1.JobTit - leFilter.negated] is specified, the result won't contain - profiles with the job titles. For example, search for - profiles with a job title "Product Manager". + Job title filter specifies job titles of profiles to match on. 
+ If a job title isn't specified, profiles with any titles are + retrieved. If multiple values are specified, profiles are + retrieved with any of the specified job titles. If [JobTitleF + ilter.negated][google.cloud.talent.v4beta1.JobTitleFilter.nega + ted] is specified, the result won't contain profiles with the + job titles. For example, search for profiles with a job title + "Product Manager". employer_filters: - Optional. Employer filter specifies employers of profiles to - match on. If an employer filter isn't specified, profiles - with any employers are retrieved. If multiple employer - filters are specified, profiles with any matching employers - are retrieved. If [EmployerFilter.negated][google.cloud.talen - t.v4beta1.EmployerFilter.negated] is specified, the result - won't contain profiles that match the employers. For example, - search for profiles that have working experience at "Google - LLC". + Employer filter specifies employers of profiles to match on. + If an employer filter isn't specified, profiles with any + employers are retrieved. If multiple employer filters are + specified, profiles with any matching employers are retrieved. + If [EmployerFilter.negated][google.cloud.talent.v4beta1.Employ + erFilter.negated] is specified, the result won't contain + profiles that match the employers. For example, search for + profiles that have working experience at "Google LLC". education_filters: - Optional. Education filter specifies education of profiles to - match on. If an education filter isn't specified, profiles - with any education are retrieved. If multiple education - filters are specified, profiles that match any education - filters are retrieved. If [EducationFilter.negated][google.cl - oud.talent.v4beta1.EducationFilter.negated] is specified, the - result won't contain profiles that match the educations. For - example, search for profiles with a master degree. + Education filter specifies education of profiles to match on. 
+ If an education filter isn't specified, profiles with any + education are retrieved. If multiple education filters are + specified, profiles that match any education filters are + retrieved. If [EducationFilter.negated][google.cloud.talent.v + 4beta1.EducationFilter.negated] is specified, the result won't + contain profiles that match the educations. For example, + search for profiles with a master degree. skill_filters: - Optional. Skill filter specifies skill of profiles to match - on. If a skill filter isn't specified, profiles with any - skills are retrieved. If multiple skill filters are - specified, profiles that match any skill filters are - retrieved. If [SkillFilter.negated][google.cloud.talent.v4bet - a1.SkillFilter.negated] is specified, the result won't contain - profiles that match the skills. For example, search for - profiles that have "Java" and "Python" in skill list. + Skill filter specifies skill of profiles to match on. If a + skill filter isn't specified, profiles with any skills are + retrieved. If multiple skill filters are specified, profiles + that match any skill filters are retrieved. If [SkillFilter.n + egated][google.cloud.talent.v4beta1.SkillFilter.negated] is + specified, the result won't contain profiles that match the + skills. For example, search for profiles that have "Java" and + "Python" in skill list. work_experience_filter: - Optional. Work experience filter specifies the total working - experience of profiles to match on. If a work experience - filter isn't specified, profiles with any professional - experience are retrieved. If multiple work experience filters - are specified, profiles that match any work experience filters - are retrieved. For example, search for profiles with 10 years - of work experience. + Work experience filter specifies the total working experience + of profiles to match on. If a work experience filter isn't + specified, profiles with any professional experience are + retrieved. 
If multiple work experience filters are specified, + profiles that match any work experience filters are retrieved. + For example, search for profiles with 10 years of work + experience. time_filters: - Optional. Time filter specifies the create/update timestamp of - the profiles to match on. For example, search for profiles + Time filter specifies the create/update timestamp of the + profiles to match on. For example, search for profiles created since "2018-1-1". hirable_filter: - Optional. The hirable filter specifies the profile's hirable - status to match on. + The hirable filter specifies the profile's hirable status to + match on. application_date_filters: - Optional. The application date filters specify application - date ranges to match on. + The application date filters specify application date ranges + to match on. application_outcome_notes_filters: - Optional. The application outcome notes filters specify the - notes for the outcome of the job application. + The application outcome notes filters specify the notes for + the outcome of the job application. application_job_filters: - Optional. The application job filters specify the job applied - for in the application. + The application job filters specify the job applied for in the + application. custom_attribute_filter: - Optional. This filter specifies a structured syntax to match - against the [Profile.custom\_attributes][google.cloud.talent.v - 4beta1.Profile.custom\_attributes] that are marked as - ``filterable``. The syntax for this expression is a subset of - Google SQL syntax. String custom attributes: supported - operators are =, != where the left of the operator is a custom - field key and the right of the operator is a string - (surrounded by quotes) value. Numeric custom attributes: - Supported operators are '>', '<' or '=' operators where the - left of the operator is a custom field key and the right of - the operator is a numeric value. 
Supported functions are - LOWER() to perform case insensitive match and EMPTY() to - filter on the existence of a key. Boolean expressions - (AND/OR/NOT) are supported up to 3 levels of nesting (for - example "((A AND B AND C) OR NOT D) AND E"), and there can be - a maximum of 50 comparisons/functions in the expression. The - expression must be < 2000 characters in length. Sample Query: - (key1 = "TEST" OR LOWER(key1)="test" OR NOT EMPTY(key1)) + This filter specifies a structured syntax to match against the + [Profile.custom\_attributes][google.cloud.talent.v4beta1.Profi + le.custom\_attributes] that are marked as ``filterable``. The + syntax for this expression is a subset of Google SQL syntax. + String custom attributes: supported operators are =, != where + the left of the operator is a custom field key and the right + of the operator is a string (surrounded by quotes) value. + Numeric custom attributes: Supported operators are '>', '<' or + '=' operators where the left of the operator is a custom field + key and the right of the operator is a numeric value. + Supported functions are LOWER() to perform case insensitive + match and EMPTY() to filter on the existence of a key. + Boolean expressions (AND/OR/NOT) are supported up to 3 levels + of nesting (for example "((A AND B AND C) OR NOT D) AND E"), + and there can be a maximum of 50 comparisons/functions in the + expression. The expression must be < 2000 characters in + length. Sample Query: (key1 = "TEST" OR LOWER(key1)="test" OR + NOT EMPTY(key1)) candidate_availability_filter: - Optional. The candidate availability filter which filters - based on availability signals. Signal 1: Number of days since - most recent job application. See [Availability.JobApplicationA - vailabilitySignal][google.cloud.talent.v4beta1.Availability.Jo - bApplicationAvailabilitySignal] for the details of this - signal. Signal 2: Number of days since last profile update. 
- See [Availability.ProfileUpdateAvailabilitySignal][google.clou - d.talent.v4beta1.Availability.ProfileUpdateAvailabilitySignal] - for the details of this signal. The candidate availability - filter helps a recruiter understand if a specific candidate is - likely to be actively seeking new job opportunities based on - an aggregated set of signals. Specifically, the intent is NOT - to indicate the candidate's potential qualification / interest - / close ability for a specific job. + Deprecated. Use availability\_filters instead. The candidate + availability filter which filters based on availability + signals. Signal 1: Number of days since most recent job + application. See [Availability.JobApplicationAvailabilitySigna + l][google.cloud.talent.v4beta1.Availability.JobApplicationAvai + labilitySignal] for the details of this signal. Signal 2: + Number of days since last profile update. See [Availability.Pr + ofileUpdateAvailabilitySignal][google.cloud.talent.v4beta1.Ava + ilability.ProfileUpdateAvailabilitySignal] for the details of + this signal. The candidate availability filter helps a + recruiter understand if a specific candidate is likely to be + actively seeking new job opportunities based on an aggregated + set of signals. Specifically, the intent is NOT to indicate + the candidate's potential qualification / interest / close + ability for a specific job. + availability_filters: + The availability filter which filters based on [Profile.availa + bility\_signals][google.cloud.talent.v4beta1.Profile.availabil + ity\_signals]. The availability filter helps a recruiter + understand if a specific candidate is likely to be actively + seeking new job opportunities based on an aggregated set of + signals. Specifically, the intent is NOT to indicate the + candidate's potential qualification / interest / close ability + for a specific job. 
There can be at most one [AvailabilityFil + ter][google.cloud.talent.v4beta1.AvailabilityFilter] per [sign + al\_type][google.cloud.talent.v4beta1.AvailabilityFilter.signa + l\_type]. If there are multiple [AvailabilityFilter][google.cl + oud.talent.v4beta1.AvailabilityFilter] for a [signal\_type][go + ogle.cloud.talent.v4beta1.AvailabilityFilter.signal\_type], an + error is thrown. person_name_filters: - Optional. Person name filter specifies person name of profiles - to match on. If multiple person name filters are specified, - profiles that match any person name filters are retrieved. - For example, search for profiles of candidates with name "John + Person name filter specifies person name of profiles to match + on. If multiple person name filters are specified, profiles + that match any person name filters are retrieved. For + example, search for profiles of candidates with name "John Smith". """, # @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.ProfileQuery) @@ -2290,45 +2435,40 @@ dict( DESCRIPTOR=_LOCATIONFILTER, __module__="google.cloud.talent_v4beta1.proto.filters_pb2", - __doc__="""Input only. - - Geographic region of the search. + __doc__="""Geographic region of the search. Attributes: address: - Optional. The address name, such as "Mountain View" or "Bay - Area". + The address name, such as "Mountain View" or "Bay Area". region_code: - Optional. CLDR region code of the country/region of the - address. This is used to address ambiguity of the user-input - location, for example, "Liverpool" against "Liverpool, NY, US" - or "Liverpool, UK". Set this field to bias location - resolution toward a specific country or territory. If this - field is not set, application behavior is biased toward the - United States by default. See http://cldr.unicode.org/ and ht - tp://www.unicode.org/cldr/charts/30/supplemental/territory\_in - formation.html for details. Example: "CH" for Switzerland. 
- Note that this filter is not applicable for Profile Search - related queries. + CLDR region code of the country/region of the address. This is + used to address ambiguity of the user-input location, for + example, "Liverpool" against "Liverpool, NY, US" or + "Liverpool, UK". Set this field to bias location resolution + toward a specific country or territory. If this field is not + set, application behavior is biased toward the United States + by default. See http://cldr.unicode.org/ and http://www.unico + de.org/cldr/charts/30/supplemental/territory\_information.html + for details. Example: "CH" for Switzerland. Note that this + filter is not applicable for Profile Search related queries. lat_lng: - Optional. The latitude and longitude of the geographic center - to search from. This field is ignored if ``address`` is - provided. + The latitude and longitude of the geographic center to search + from. This field is ignored if ``address`` is provided. distance_in_miles: - Optional. The distance\_in\_miles is applied when the location - being searched for is identified as a city or smaller. This - field is ignored if the location being searched for is a state - or larger. + The distance\_in\_miles is applied when the location being + searched for is identified as a city or smaller. This field is + ignored if the location being searched for is a state or + larger. telecommute_preference: - Optional. Allows the client to return jobs without a set - location, specifically, telecommuting jobs (telecommuting is - considered by the service as a special location. [Job.posting\ - _region][google.cloud.talent.v4beta1.Job.posting\_region] - indicates if a job permits telecommuting. 
If this field is set - to [TelecommutePreference.TELECOMMUTE\_ALLOWED][google.cloud.t - alent.v4beta1.LocationFilter.TelecommutePreference.TELECOMMUTE - \_ALLOWED], telecommuting jobs are searched, and + Allows the client to return jobs without a set location, + specifically, telecommuting jobs (telecommuting is considered + by the service as a special location. [Job.posting\_region][go + ogle.cloud.talent.v4beta1.Job.posting\_region] indicates if a + job permits telecommuting. If this field is set to [Telecommut + ePreference.TELECOMMUTE\_ALLOWED][google.cloud.talent.v4beta1. + LocationFilter.TelecommutePreference.TELECOMMUTE\_ALLOWED], + telecommuting jobs are searched, and [address][google.cloud.talent.v4beta1.LocationFilter.address] and [lat\_lng][google.cloud.talent.v4beta1.LocationFilter.lat\ _lng] are ignored. If not set or set to [TelecommutePreference @@ -2343,9 +2483,9 @@ less relevant than other jobs in the search response. This field is only used for job search requests. negated: - Optional. Whether to apply negation to the filter so profiles - matching the filter are excluded. Currently only supported in - profile search. + Whether to apply negation to the filter so profiles matching + the filter are excluded. Currently only supported in profile + search. """, # @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.LocationFilter) ), @@ -2358,9 +2498,7 @@ dict( DESCRIPTOR=_COMPENSATIONFILTER, __module__="google.cloud.talent_v4beta1.proto.filters_pb2", - __doc__="""Input only. - - Filter on job compensation type and amount. + __doc__="""Filter on job compensation type and amount. Attributes: @@ -2371,10 +2509,10 @@ nsationInfo.CompensationUnit][google.cloud.talent.v4beta1.Comp ensationInfo.CompensationUnit]. range: - Optional. Compensation range. + Compensation range. include_jobs_with_unspecified_compensation_range: - Optional. If set to true, jobs with unspecified compensation - range fields are included. 
+ If set to true, jobs with unspecified compensation range + fields are included. """, # @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.CompensationFilter) ), @@ -2387,36 +2525,34 @@ dict( DESCRIPTOR=_COMMUTEFILTER, __module__="google.cloud.talent_v4beta1.proto.filters_pb2", - __doc__="""Input only. - - Parameters needed for commute search. + __doc__="""Parameters needed for commute search. Attributes: commute_method: - Required. The method of transportation for which to calculate - the commute time. + Required. The method of transportation to calculate the + commute time for. start_coordinates: - Required. The latitude and longitude of the location from - which to calculate the commute time. + Required. The latitude and longitude of the location to + calculate the commute time from. travel_duration: Required. The maximum travel time in seconds. The maximum allowed value is ``3600s`` (one hour). Format is ``123s``. allow_imprecise_addresses: - Optional. If ``true``, jobs without street level addresses may - also be returned. For city level addresses, the city center is - used. For state and coarser level addresses, text matching is - used. If this field is set to ``false`` or isn't specified, - only jobs that include street level addresses will be returned - by commute search. + If ``true``, jobs without street level addresses may also be + returned. For city level addresses, the city center is used. + For state and coarser level addresses, text matching is used. + If this field is set to ``false`` or isn't specified, only + jobs that include street level addresses will be returned by + commute search. traffic_option: - Optional. Traffic factor to take into account while searching - by commute. + Traffic factor to take into account while searching by + commute. road_traffic: - Optional. Specifies the traffic density to use when - calculating commute time. + Specifies the traffic density to use when calculating commute + time. 
departure_time: - Optional. The departure time used to calculate traffic impact, + The departure time used to calculate traffic impact, represented as [google.type.TimeOfDay][google.type.TimeOfDay] in local time zone. Currently traffic model is restricted to hour level resolution. @@ -2432,18 +2568,16 @@ dict( DESCRIPTOR=_JOBTITLEFILTER, __module__="google.cloud.talent_v4beta1.proto.filters_pb2", - __doc__="""Input only. - - Job title of the search. + __doc__="""Job title of the search. Attributes: job_title: - Required. The job title, for example, "Software engineer", or + Required. The job title. For example, "Software engineer", or "Product manager". negated: - Optional. Whether to apply negation to the filter so profiles - matching the filter are excluded. + Whether to apply negation to the filter so profiles matching + the filter are excluded. """, # @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.JobTitleFilter) ), @@ -2456,9 +2590,7 @@ dict( DESCRIPTOR=_SKILLFILTER, __module__="google.cloud.talent_v4beta1.proto.filters_pb2", - __doc__="""Input only. - - Skill filter of the search. + __doc__="""Skill filter of the search. Attributes: @@ -2466,8 +2598,8 @@ Required. The skill name. For example, "java", "j2ee", and so on. negated: - Optional. Whether to apply negation to the filter so profiles - matching the filter are excluded. + Whether to apply negation to the filter so profiles matching + the filter are excluded. """, # @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.SkillFilter) ), @@ -2480,9 +2612,7 @@ dict( DESCRIPTOR=_EMPLOYERFILTER, __module__="google.cloud.talent_v4beta1.proto.filters_pb2", - __doc__="""Input only. - - Employer filter of the search. + __doc__="""Employer filter of the search. Attributes: @@ -2490,14 +2620,13 @@ Required. The name of the employer, for example "Google", "Alphabet". mode: - Optional. Define set of [EmploymentRecord][google.cloud.talent - .v4beta1.EmploymentRecord]s to search against. 
Defaults to [E - mployerFilterMode.ALL\_EMPLOYMENT\_RECORDS][google.cloud.talen - t.v4beta1.EmployerFilter.EmployerFilterMode.ALL\_EMPLOYMENT\_R - ECORDS]. + Define set of [EmploymentRecord][google.cloud.talent.v4beta1.E + mploymentRecord]s to search against. Defaults to [EmployerFil + terMode.ALL\_EMPLOYMENT\_RECORDS][google.cloud.talent.v4beta1. + EmployerFilter.EmployerFilterMode.ALL\_EMPLOYMENT\_RECORDS]. negated: - Optional. Whether to apply negation to the filter so profiles - matching the filter is excluded. + Whether to apply negation to the filter so profiles matching + the filter is excluded. """, # @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.EmployerFilter) ), @@ -2510,27 +2639,25 @@ dict( DESCRIPTOR=_EDUCATIONFILTER, __module__="google.cloud.talent_v4beta1.proto.filters_pb2", - __doc__="""Input only. - - Education filter of the search. + __doc__="""Education filter of the search. Attributes: school: - Optional. The school name. For example "MIT", "University of - California, Berkeley". + The school name. For example "MIT", "University of California, + Berkeley". field_of_study: - Optional. The field of study. This is to search against value - provided in [Degree.fields\_of\_study][google.cloud.talent.v4b - eta1.Degree.fields\_of\_study]. For example "Computer - Science", "Mathematics". + The field of study. This is to search against value provided + in [Degree.fields\_of\_study][google.cloud.talent.v4beta1.Degr + ee.fields\_of\_study]. For example "Computer Science", + "Mathematics". degree_type: - Optional. Education degree in ISCED code. Each value in degree - covers a specific level of education, without any expansion to - upper nor lower levels of education degree. + Education degree in ISCED code. Each value in degree covers a + specific level of education, without any expansion to upper + nor lower levels of education degree. negated: - Optional. 
Whether to apply negation to the filter so profiles - matching the filter is excluded. + Whether to apply negation to the filter so profiles matching + the filter is excluded. """, # @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.EducationFilter) ), @@ -2543,9 +2670,7 @@ dict( DESCRIPTOR=_WORKEXPERIENCEFILTER, __module__="google.cloud.talent_v4beta1.proto.filters_pb2", - __doc__="""Input only. - - Work experience filter. + __doc__="""Work experience filter. This filter is used to search for profiles with working experience length between @@ -2556,11 +2681,9 @@ Attributes: min_experience: - Optional. The minimum duration of the work experience - (inclusive). + The minimum duration of the work experience (inclusive). max_experience: - Optional. The maximum duration of the work experience - (exclusive). + The maximum duration of the work experience (exclusive). """, # @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.WorkExperienceFilter) ), @@ -2573,9 +2696,7 @@ dict( DESCRIPTOR=_APPLICATIONDATEFILTER, __module__="google.cloud.talent_v4beta1.proto.filters_pb2", - __doc__="""Input only. - - Application Date Range Filter. + __doc__="""Application Date Range Filter. The API matches profiles with [Application.application\_date][google.cloud.talent.v4beta1.Application.application\_date] @@ -2589,11 +2710,11 @@ Attributes: start_date: - Optional. Start date. If it's missing, The API matches - profiles with application date not after the end date. + Start date. If it's missing, The API matches profiles with + application date not after the end date. end_date: - Optional. End date. If it's missing, The API matches profiles - with application date not before the start date. + End date. If it's missing, The API matches profiles with + application date not before the start date. 
""", # @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.ApplicationDateFilter) ), @@ -2606,9 +2727,7 @@ dict( DESCRIPTOR=_APPLICATIONOUTCOMENOTESFILTER, __module__="google.cloud.talent_v4beta1.proto.filters_pb2", - __doc__="""Input only. - - Outcome Notes Filter. + __doc__="""Outcome Notes Filter. Attributes: @@ -2617,10 +2736,10 @@ does an exact match on the [Application.outcome\_notes][google .cloud.talent.v4beta1.Application.outcome\_notes] in profiles. negated: - Optional. If true, The API excludes all candidates with any [A - pplication.outcome\_notes][google.cloud.talent.v4beta1.Applica - tion.outcome\_notes] matching the outcome reason specified in - the filter. + If true, The API excludes all candidates with any [Application + .outcome\_notes][google.cloud.talent.v4beta1.Application.outco + me\_notes] matching the outcome reason specified in the + filter. """, # @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.ApplicationOutcomeNotesFilter) ), @@ -2633,26 +2752,23 @@ dict( DESCRIPTOR=_APPLICATIONJOBFILTER, __module__="google.cloud.talent_v4beta1.proto.filters_pb2", - __doc__="""Input only. - - Filter on the job information of Application. + __doc__="""Filter on the job information of Application. Attributes: job_requisition_id: - Optional. The job requisition id in the application. The API - does an exact match on the [Job.requisition\_id][google.cloud. - talent.v4beta1.Job.requisition\_id] of + The job requisition id in the application. The API does an + exact match on the [Job.requisition\_id][google.cloud.talent.v + 4beta1.Job.requisition\_id] of [Application.job][google.cloud.talent.v4beta1.Application.job] in profiles. job_title: - Optional. The job title in the application. The API does an - exact match on the - [Job.title][google.cloud.talent.v4beta1.Job.title] of + The job title in the application. 
The API does an exact match + on the [Job.title][google.cloud.talent.v4beta1.Job.title] of [Application.job][google.cloud.talent.v4beta1.Application.job] in profiles. negated: - Optional. If true, the API excludes all profiles with any + If true, the API excludes all profiles with any [Application.job][google.cloud.talent.v4beta1.Application.job] matching the filters. """, @@ -2667,24 +2783,22 @@ dict( DESCRIPTOR=_TIMEFILTER, __module__="google.cloud.talent_v4beta1.proto.filters_pb2", - __doc__="""Input only. - - Filter on create timestamp or update timestamp of profiles. + __doc__="""Filter on create timestamp or update timestamp of profiles. Attributes: start_time: - Optional. Start timestamp, matching profiles with the start - time. If this field missing, The API matches profiles with - create / update timestamp before the end timestamp. + Start timestamp, matching profiles with the start time. If + this field missing, The API matches profiles with create / + update timestamp before the end timestamp. end_time: - Optional. End timestamp, matching profiles with the end time. - If this field missing, The API matches profiles with create / - update timestamp after the start timestamp. + End timestamp, matching profiles with the end time. If this + field missing, The API matches profiles with create / update + timestamp after the start timestamp. time_field: - Optional. Specifies which time field to filter profiles. - Defaults to [TimeField.CREATE\_TIME][google.cloud.talent.v4bet - a1.TimeFilter.TimeField.CREATE\_TIME]. + Specifies which time field to filter profiles. Defaults to [T + imeField.CREATE\_TIME][google.cloud.talent.v4beta1.TimeFilter. + TimeField.CREATE\_TIME]. """, # @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.TimeFilter) ), @@ -2697,30 +2811,55 @@ dict( DESCRIPTOR=_CANDIDATEAVAILABILITYFILTER, __module__="google.cloud.talent_v4beta1.proto.filters_pb2", - __doc__="""Input only + __doc__="""Deprecated. Use AvailabilityFilter instead. 
Filter on availability signals. Attributes: negated: - Optional. It is false by default. If true, API excludes all - the potential available profiles. + It is false by default. If true, API excludes all the + potential available profiles. """, # @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.CandidateAvailabilityFilter) ), ) _sym_db.RegisterMessage(CandidateAvailabilityFilter) +AvailabilityFilter = _reflection.GeneratedProtocolMessageType( + "AvailabilityFilter", + (_message.Message,), + dict( + DESCRIPTOR=_AVAILABILITYFILTER, + __module__="google.cloud.talent_v4beta1.proto.filters_pb2", + __doc__="""Filter on availability signals. + + + Attributes: + signal_type: + Required. Type of signal to apply filter on. + range: + Required. Range of times to filter candidate signals by. + required: + If multiple [AvailabilityFilter][google.cloud.talent.v4beta1.A + vailabilityFilter] are provided, the default behavior is to OR + all filters, but if this field is set to true, this particular + [AvailabilityFilter][google.cloud.talent.v4beta1.AvailabilityF + ilter] will be AND'ed against other [AvailabilityFilter][googl + e.cloud.talent.v4beta1.AvailabilityFilter]. + """, + # @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.AvailabilityFilter) + ), +) +_sym_db.RegisterMessage(AvailabilityFilter) + PersonNameFilter = _reflection.GeneratedProtocolMessageType( "PersonNameFilter", (_message.Message,), dict( DESCRIPTOR=_PERSONNAMEFILTER, __module__="google.cloud.talent_v4beta1.proto.filters_pb2", - __doc__="""Input only. - - Filter on person name. + __doc__="""Filter on person name. 
Attributes: @@ -2740,4 +2879,18 @@ DESCRIPTOR._options = None +_PROFILEQUERY.fields_by_name["candidate_availability_filter"]._options = None +_COMPENSATIONFILTER.fields_by_name["type"]._options = None +_COMPENSATIONFILTER.fields_by_name["units"]._options = None +_COMMUTEFILTER.fields_by_name["commute_method"]._options = None +_COMMUTEFILTER.fields_by_name["start_coordinates"]._options = None +_COMMUTEFILTER.fields_by_name["travel_duration"]._options = None +_JOBTITLEFILTER.fields_by_name["job_title"]._options = None +_SKILLFILTER.fields_by_name["skill"]._options = None +_EMPLOYERFILTER.fields_by_name["employer"]._options = None +_APPLICATIONOUTCOMENOTESFILTER.fields_by_name["outcome_notes"]._options = None +_CANDIDATEAVAILABILITYFILTER._options = None +_AVAILABILITYFILTER.fields_by_name["signal_type"]._options = None +_AVAILABILITYFILTER.fields_by_name["range"]._options = None +_PERSONNAMEFILTER.fields_by_name["person_name"]._options = None # @@protoc_insertion_point(module_scope) diff --git a/talent/google/cloud/talent_v4beta1/proto/histogram.proto b/talent/google/cloud/talent_v4beta1/proto/histogram.proto index 91183fb03c4c..90d9c55c84d1 100644 --- a/talent/google/cloud/talent_v4beta1/proto/histogram.proto +++ b/talent/google/cloud/talent_v4beta1/proto/histogram.proto @@ -25,8 +25,6 @@ option java_outer_classname = "HistogramProto"; option java_package = "com.google.cloud.talent.v4beta1"; option objc_class_prefix = "CTS"; -// Input Only. -// // The histogram request. message HistogramQuery { // An expression specifies a histogram request against matching resources @@ -40,8 +38,6 @@ message HistogramQuery { string histogram_query = 1; } -// Output only. -// // Histogram result that matches // [HistogramQuery][google.cloud.talent.v4beta1.HistogramQuery] specified in // searches. 
diff --git a/talent/google/cloud/talent_v4beta1/proto/histogram_pb2.py b/talent/google/cloud/talent_v4beta1/proto/histogram_pb2.py index c56bda26312c..bf7d13858599 100644 --- a/talent/google/cloud/talent_v4beta1/proto/histogram_pb2.py +++ b/talent/google/cloud/talent_v4beta1/proto/histogram_pb2.py @@ -197,9 +197,7 @@ dict( DESCRIPTOR=_HISTOGRAMQUERY, __module__="google.cloud.talent_v4beta1.proto.histogram_pb2", - __doc__="""Input Only. - - The histogram request. + __doc__="""The histogram request. Attributes: @@ -231,9 +229,7 @@ ), DESCRIPTOR=_HISTOGRAMQUERYRESULT, __module__="google.cloud.talent_v4beta1.proto.histogram_pb2", - __doc__="""Output only. - - Histogram result that matches + __doc__="""Histogram result that matches [HistogramQuery][google.cloud.talent.v4beta1.HistogramQuery] specified in searches. diff --git a/talent/google/cloud/talent_v4beta1/proto/job.proto b/talent/google/cloud/talent_v4beta1/proto/job.proto index 5c0837d093ec..ad4d25921e9e 100644 --- a/talent/google/cloud/talent_v4beta1/proto/job.proto +++ b/talent/google/cloud/talent_v4beta1/proto/job.proto @@ -18,6 +18,7 @@ syntax = "proto3"; package google.cloud.talent.v4beta1; import "google/api/annotations.proto"; +import "google/api/field_behavior.proto"; import "google/cloud/talent/v4beta1/common.proto"; import "google/protobuf/timestamp.proto"; import "google/type/postal_address.proto"; @@ -35,14 +36,14 @@ option objc_class_prefix = "CTS"; message Job { // Application related details of a job posting. message ApplicationInfo { - // Optional. Use this field to specify email address(es) to which resumes or + // Use this field to specify email address(es) to which resumes or // applications can be sent. // // The maximum number of allowed characters for each entry is 255. repeated string emails = 1; - // Optional. Use this field to provide instructions, such as "Mail your - // application to ...", that a candidate can follow to apply for the job. 
+ // Use this field to provide instructions, such as "Mail your application + // to ...", that a candidate can follow to apply for the job. // // This field accepts and sanitizes HTML input, and also accepts // bold, italic, ordered list, and unordered list markup tags. @@ -50,15 +51,13 @@ message Job { // The maximum number of allowed characters is 3,000. string instruction = 2; - // Optional. Use this URI field to direct an applicant to a website, for - // example to link to an online application form. + // Use this URI field to direct an applicant to a website, for example to + // link to an online application form. // // The maximum number of allowed characters for each entry is 2,000. repeated string uris = 3; } - // Output only. - // // Derived details about the job posting. message DerivedInfo { // Structured locations of the job, resolved from @@ -76,15 +75,13 @@ message Job { repeated JobCategory job_categories = 3; } - // Input only. - // // Options for job processing. message ProcessingOptions { - // Optional. If set to `true`, the service does not attempt to resolve a + // If set to `true`, the service does not attempt to resolve a // more precise address for the job. bool disable_street_address_resolution = 1; - // Optional. Option for job HTML content sanitization. Applied fields are: + // Option for job HTML content sanitization. Applied fields are: // // * description // * applicationInfo.instruction @@ -106,11 +103,11 @@ message Job { // job is created. // // The format is - // "projects/{project_id}/tenants/{tenant_id}/jobs/{job_id}", for - // example, "projects/api-test-project/tenants/foo/jobs/1234". + // "projects/{project_id}/tenants/{tenant_id}/jobs/{job_id}". For + // example, "projects/foo/tenants/bar/jobs/baz". // - // Tenant id is optional and the default tenant is used if unspecified, for - // example, "projects/api-test-project/jobs/1234". + // If tenant id is unspecified, the default tenant is used. 
For + // example, "projects/foo/jobs/bar". // // Use of this field in job queries and API calls is preferred over the use of // [requisition_id][google.cloud.talent.v4beta1.Job.requisition_id] since this @@ -120,12 +117,12 @@ message Job { // Required. The resource name of the company listing the job. // // The format is - // "projects/{project_id}/tenants/{tenant_id}/companies/{company_id}", for - // example, "projects/api-test-project/tenants/foo/companies/bar". + // "projects/{project_id}/tenants/{tenant_id}/companies/{company_id}". For + // example, "projects/foo/tenants/bar/companies/baz". // - // Tenant id is optional and the default tenant is used if unspecified, for - // example, "projects/api-test-project/companies/bar". - string company = 2; + // If tenant id is unspecified, the default tenant is used. For + // example, "projects/foo/companies/bar". + string company = 2 [(google.api.field_behavior) = REQUIRED]; // Required. The requisition ID, also referred to as the posting ID, is // assigned by the client to identify a job. This field is intended to be used @@ -136,12 +133,12 @@ message Job { // [requisition_id][google.cloud.talent.v4beta1.Job.requisition_id]. // // The maximum number of allowed characters is 255. - string requisition_id = 3; + string requisition_id = 3 [(google.api.field_behavior) = REQUIRED]; // Required. The title of the job, such as "Software Engineer" // // The maximum number of allowed characters is 500. - string title = 4; + string title = 4 [(google.api.field_behavior) = REQUIRED]; // Required. The description of the job, which typically includes a // multi-paragraph description of the company and related information. @@ -154,9 +151,9 @@ message Job { // bold, italic, ordered list, and unordered list markup tags. // // The maximum number of allowed characters is 100,000. 
- string description = 5; + string description = 5 [(google.api.field_behavior) = REQUIRED]; - // Optional but strongly recommended for the best service experience. + // Strongly recommended for the best service experience. // // Location(s) where the employer is looking to hire for this job posting. // @@ -179,18 +176,18 @@ message Job { // The maximum number of allowed characters is 500. repeated string addresses = 6; - // Optional. Job application information. + // Job application information. ApplicationInfo application_info = 7; - // Optional. The benefits included with the job. + // The benefits included with the job. repeated JobBenefit job_benefits = 8; - // Optional. Job compensation information (a.k.a. "pay rate") i.e., the - // compensation that will paid to the employee. + // Job compensation information (a.k.a. "pay rate") i.e., the compensation + // that will paid to the employee. CompensationInfo compensation_info = 9; - // Optional. A map of fields to hold both filterable and non-filterable custom - // job attributes that are not covered by the provided structured fields. + // A map of fields to hold both filterable and non-filterable custom job + // attributes that are not covered by the provided structured fields. // // The keys of the map are strings up to 64 bytes and must match the // pattern: [a-zA-Z][a-zA-Z0-9_]*. For example, key0LikeThis or @@ -203,28 +200,27 @@ message Job { // is 50KB. map custom_attributes = 10; - // Optional. The desired education degrees for the job, such as Bachelors, - // Masters. + // The desired education degrees for the job, such as Bachelors, Masters. repeated DegreeType degree_types = 11; - // Optional. The department or functional area within the company with the - // open position. + // The department or functional area within the company with the open + // position. // // The maximum number of allowed characters is 255. string department = 12; - // Optional. 
The employment type(s) of a job, for example, + // The employment type(s) of a job, for example, // [full time][google.cloud.talent.v4beta1.EmploymentType.FULL_TIME] or // [part time][google.cloud.talent.v4beta1.EmploymentType.PART_TIME]. repeated EmploymentType employment_types = 13; - // Optional. A description of bonus, commission, and other compensation + // A description of bonus, commission, and other compensation // incentives associated with the job not including salary or pay. // // The maximum number of allowed characters is 10,000. string incentives = 14; - // Optional. The language of the posting. This field is distinct from + // The language of the posting. This field is distinct from // any requirements for fluency that are associated with the job. // // Language codes must be in BCP-47 format, such as "en-US" or "sr-Latn". @@ -239,11 +235,10 @@ message Job { // otherwise defaults to 'en_US'. string language_code = 15; - // Optional. The experience level associated with the job, such as "Entry - // Level". + // The experience level associated with the job, such as "Entry Level". JobLevel job_level = 16; - // Optional. A promotion value of the job, as determined by the client. + // A promotion value of the job, as determined by the client. // The value determines the sort order of the jobs returned when searching for // jobs using the featured jobs search call, with higher promotional values // being returned first and ties being resolved by relevance sort. Only the @@ -252,7 +247,7 @@ message Job { // Default value is 0, and negative values are treated as 0. int32 promotion_value = 17; - // Optional. A description of the qualifications required to perform the + // A description of the qualifications required to perform the // job. The use of this field is recommended // as an alternative to using the more general // [description][google.cloud.talent.v4beta1.Job.description] field. 
@@ -263,7 +258,7 @@ message Job { // The maximum number of allowed characters is 10,000. string qualifications = 18; - // Optional. A description of job responsibilities. The use of this field is + // A description of job responsibilities. The use of this field is // recommended as an alternative to using the more general // [description][google.cloud.talent.v4beta1.Job.description] field. // @@ -273,12 +268,12 @@ message Job { // The maximum number of allowed characters is 10,000. string responsibilities = 19; - // Optional. The job - // [PostingRegion][google.cloud.talent.v4beta1.PostingRegion] (for example, - // state, country) throughout which the job is available. If this field is - // set, a [LocationFilter][google.cloud.talent.v4beta1.LocationFilter] in a - // search query within the job region finds this job posting if an exact - // location match isn't specified. If this field is set to + // The job [PostingRegion][google.cloud.talent.v4beta1.PostingRegion] (for + // example, state, country) throughout which the job is available. If this + // field is set, a + // [LocationFilter][google.cloud.talent.v4beta1.LocationFilter] in a search + // query within the job region finds this job posting if an exact location + // match isn't specified. If this field is set to // [PostingRegion.NATION][google.cloud.talent.v4beta1.PostingRegion.NATION] or // [PostingRegion.ADMINISTRATIVE_AREA][google.cloud.talent.v4beta1.PostingRegion.ADMINISTRATIVE_AREA], // setting job [Job.addresses][google.cloud.talent.v4beta1.Job.addresses] to @@ -294,21 +289,20 @@ message Job { // if not specified. Visibility visibility = 21 [deprecated = true]; - // Optional. The start timestamp of the job in UTC time zone. Typically this - // field is used for contracting engagements. Invalid timestamps are ignored. + // The start timestamp of the job in UTC time zone. Typically this field + // is used for contracting engagements. Invalid timestamps are ignored. 
google.protobuf.Timestamp job_start_time = 22; - // Optional. The end timestamp of the job. Typically this field is used for - // contracting engagements. Invalid timestamps are ignored. + // The end timestamp of the job. Typically this field is used for contracting + // engagements. Invalid timestamps are ignored. google.protobuf.Timestamp job_end_time = 23; - // Optional. The timestamp this job posting was most recently published. The - // default value is the time the request arrives at the server. Invalid - // timestamps are ignored. + // The timestamp this job posting was most recently published. The default + // value is the time the request arrives at the server. Invalid timestamps are + // ignored. google.protobuf.Timestamp posting_publish_time = 24; - // Optional but strongly recommended for the best service - // experience. + // Strongly recommended for the best service experience. // // The expiration timestamp of the job. After this timestamp, the // job is marked as expired, and it no longer appears in search results. The @@ -361,17 +355,19 @@ message Job { google.protobuf.Timestamp posting_expire_time = 25; // Output only. The timestamp when this job posting was created. - google.protobuf.Timestamp posting_create_time = 26; + google.protobuf.Timestamp posting_create_time = 26 + [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The timestamp when this job posting was last updated. - google.protobuf.Timestamp posting_update_time = 27; + google.protobuf.Timestamp posting_update_time = 27 + [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Display name of the company listing the job. - string company_display_name = 28; + string company_display_name = 28 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Derived details about the job posting. - DerivedInfo derived_info = 29; + DerivedInfo derived_info = 29 [(google.api.field_behavior) = OUTPUT_ONLY]; - // Optional. Options for job processing. + // Options for job processing. 
ProcessingOptions processing_options = 30; } diff --git a/talent/google/cloud/talent_v4beta1/proto/job_pb2.py b/talent/google/cloud/talent_v4beta1/proto/job_pb2.py index 1d3e43afd8d2..c46362bd3a4c 100644 --- a/talent/google/cloud/talent_v4beta1/proto/job_pb2.py +++ b/talent/google/cloud/talent_v4beta1/proto/job_pb2.py @@ -16,6 +16,7 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.cloud.talent_v4beta1.proto import ( common_pb2 as google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_common__pb2, ) @@ -31,10 +32,11 @@ "\n\037com.google.cloud.talent.v4beta1B\020JobResourceProtoP\001ZAgoogle.golang.org/genproto/googleapis/cloud/talent/v4beta1;talent\242\002\003CTS" ), serialized_pb=_b( - "\n+google/cloud/talent_v4beta1/proto/job.proto\x12\x1bgoogle.cloud.talent.v4beta1\x1a\x1cgoogle/api/annotations.proto\x1a.google/cloud/talent_v4beta1/proto/common.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a google/type/postal_address.proto\"\xab\x0e\n\x03Job\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07\x63ompany\x18\x02 \x01(\t\x12\x16\n\x0erequisition_id\x18\x03 \x01(\t\x12\r\n\x05title\x18\x04 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x05 \x01(\t\x12\x11\n\taddresses\x18\x06 \x03(\t\x12J\n\x10\x61pplication_info\x18\x07 \x01(\x0b\x32\x30.google.cloud.talent.v4beta1.Job.ApplicationInfo\x12=\n\x0cjob_benefits\x18\x08 \x03(\x0e\x32'.google.cloud.talent.v4beta1.JobBenefit\x12H\n\x11\x63ompensation_info\x18\t \x01(\x0b\x32-.google.cloud.talent.v4beta1.CompensationInfo\x12Q\n\x11\x63ustom_attributes\x18\n \x03(\x0b\x32\x36.google.cloud.talent.v4beta1.Job.CustomAttributesEntry\x12=\n\x0c\x64\x65gree_types\x18\x0b \x03(\x0e\x32'.google.cloud.talent.v4beta1.DegreeType\x12\x12\n\ndepartment\x18\x0c \x01(\t\x12\x45\n\x10\x65mployment_types\x18\r \x03(\x0e\x32+.google.cloud.talent.v4beta1.EmploymentType\x12\x12\n\nincentives\x18\x0e 
\x01(\t\x12\x15\n\rlanguage_code\x18\x0f \x01(\t\x12\x38\n\tjob_level\x18\x10 \x01(\x0e\x32%.google.cloud.talent.v4beta1.JobLevel\x12\x17\n\x0fpromotion_value\x18\x11 \x01(\x05\x12\x16\n\x0equalifications\x18\x12 \x01(\t\x12\x18\n\x10responsibilities\x18\x13 \x01(\t\x12\x42\n\x0eposting_region\x18\x14 \x01(\x0e\x32*.google.cloud.talent.v4beta1.PostingRegion\x12?\n\nvisibility\x18\x15 \x01(\x0e\x32'.google.cloud.talent.v4beta1.VisibilityB\x02\x18\x01\x12\x32\n\x0ejob_start_time\x18\x16 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x30\n\x0cjob_end_time\x18\x17 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x38\n\x14posting_publish_time\x18\x18 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x37\n\x13posting_expire_time\x18\x19 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x37\n\x13posting_create_time\x18\x1a \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x37\n\x13posting_update_time\x18\x1b \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x1c\n\x14\x63ompany_display_name\x18\x1c \x01(\t\x12\x42\n\x0c\x64\x65rived_info\x18\x1d \x01(\x0b\x32,.google.cloud.talent.v4beta1.Job.DerivedInfo\x12N\n\x12processing_options\x18\x1e \x01(\x0b\x32\x32.google.cloud.talent.v4beta1.Job.ProcessingOptions\x1a\x44\n\x0f\x41pplicationInfo\x12\x0e\n\x06\x65mails\x18\x01 \x03(\t\x12\x13\n\x0binstruction\x18\x02 \x01(\t\x12\x0c\n\x04uris\x18\x03 \x03(\t\x1a\x89\x01\n\x0b\x44\x65rivedInfo\x12\x38\n\tlocations\x18\x01 \x03(\x0b\x32%.google.cloud.talent.v4beta1.Location\x12@\n\x0ejob_categories\x18\x03 \x03(\x0e\x32(.google.cloud.talent.v4beta1.JobCategory\x1a\x88\x01\n\x11ProcessingOptions\x12)\n!disable_street_address_resolution\x18\x01 \x01(\x08\x12H\n\x11html_sanitization\x18\x02 \x01(\x0e\x32-.google.cloud.talent.v4beta1.HtmlSanitization\x1a\x65\n\x15\x43ustomAttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12;\n\x05value\x18\x02 
\x01(\x0b\x32,.google.cloud.talent.v4beta1.CustomAttribute:\x02\x38\x01\x42~\n\x1f\x63om.google.cloud.talent.v4beta1B\x10JobResourceProtoP\x01ZAgoogle.golang.org/genproto/googleapis/cloud/talent/v4beta1;talent\xa2\x02\x03\x43TSb\x06proto3" + "\n+google/cloud/talent_v4beta1/proto/job.proto\x12\x1bgoogle.cloud.talent.v4beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a.google/cloud/talent_v4beta1/proto/common.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a google/type/postal_address.proto\"\xd3\x0e\n\x03Job\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x07\x63ompany\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x1b\n\x0erequisition_id\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x12\n\x05title\x18\x04 \x01(\tB\x03\xe0\x41\x02\x12\x18\n\x0b\x64\x65scription\x18\x05 \x01(\tB\x03\xe0\x41\x02\x12\x11\n\taddresses\x18\x06 \x03(\t\x12J\n\x10\x61pplication_info\x18\x07 \x01(\x0b\x32\x30.google.cloud.talent.v4beta1.Job.ApplicationInfo\x12=\n\x0cjob_benefits\x18\x08 \x03(\x0e\x32'.google.cloud.talent.v4beta1.JobBenefit\x12H\n\x11\x63ompensation_info\x18\t \x01(\x0b\x32-.google.cloud.talent.v4beta1.CompensationInfo\x12Q\n\x11\x63ustom_attributes\x18\n \x03(\x0b\x32\x36.google.cloud.talent.v4beta1.Job.CustomAttributesEntry\x12=\n\x0c\x64\x65gree_types\x18\x0b \x03(\x0e\x32'.google.cloud.talent.v4beta1.DegreeType\x12\x12\n\ndepartment\x18\x0c \x01(\t\x12\x45\n\x10\x65mployment_types\x18\r \x03(\x0e\x32+.google.cloud.talent.v4beta1.EmploymentType\x12\x12\n\nincentives\x18\x0e \x01(\t\x12\x15\n\rlanguage_code\x18\x0f \x01(\t\x12\x38\n\tjob_level\x18\x10 \x01(\x0e\x32%.google.cloud.talent.v4beta1.JobLevel\x12\x17\n\x0fpromotion_value\x18\x11 \x01(\x05\x12\x16\n\x0equalifications\x18\x12 \x01(\t\x12\x18\n\x10responsibilities\x18\x13 \x01(\t\x12\x42\n\x0eposting_region\x18\x14 \x01(\x0e\x32*.google.cloud.talent.v4beta1.PostingRegion\x12?\n\nvisibility\x18\x15 \x01(\x0e\x32'.google.cloud.talent.v4beta1.VisibilityB\x02\x18\x01\x12\x32\n\x0ejob_start_time\x18\x16 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x30\n\x0cjob_end_time\x18\x17 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x38\n\x14posting_publish_time\x18\x18 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x37\n\x13posting_expire_time\x18\x19 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12<\n\x13posting_create_time\x18\x1a \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12<\n\x13posting_update_time\x18\x1b \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12!\n\x14\x63ompany_display_name\x18\x1c \x01(\tB\x03\xe0\x41\x03\x12G\n\x0c\x64\x65rived_info\x18\x1d \x01(\x0b\x32,.google.cloud.talent.v4beta1.Job.DerivedInfoB\x03\xe0\x41\x03\x12N\n\x12processing_options\x18\x1e \x01(\x0b\x32\x32.google.cloud.talent.v4beta1.Job.ProcessingOptions\x1a\x44\n\x0f\x41pplicationInfo\x12\x0e\n\x06\x65mails\x18\x01 \x03(\t\x12\x13\n\x0binstruction\x18\x02 \x01(\t\x12\x0c\n\x04uris\x18\x03 \x03(\t\x1a\x89\x01\n\x0b\x44\x65rivedInfo\x12\x38\n\tlocations\x18\x01 \x03(\x0b\x32%.google.cloud.talent.v4beta1.Location\x12@\n\x0ejob_categories\x18\x03 \x03(\x0e\x32(.google.cloud.talent.v4beta1.JobCategory\x1a\x88\x01\n\x11ProcessingOptions\x12)\n!disable_street_address_resolution\x18\x01 \x01(\x08\x12H\n\x11html_sanitization\x18\x02 \x01(\x0e\x32-.google.cloud.talent.v4beta1.HtmlSanitization\x1a\x65\n\x15\x43ustomAttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12;\n\x05value\x18\x02 \x01(\x0b\x32,.google.cloud.talent.v4beta1.CustomAttribute:\x02\x38\x01\x42~\n\x1f\x63om.google.cloud.talent.v4beta1B\x10JobResourceProtoP\x01ZAgoogle.golang.org/genproto/googleapis/cloud/talent/v4beta1;talent\xa2\x02\x03\x43TSb\x06proto3" ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_common__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, google_dot_type_dot_postal__address__pb2.DESCRIPTOR, @@ -112,8 +114,8 @@ syntax="proto3", 
extension_ranges=[], oneofs=[], - serialized_start=1607, - serialized_end=1675, + serialized_start=1680, + serialized_end=1748, ) _JOB_DERIVEDINFO = _descriptor.Descriptor( @@ -168,8 +170,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1678, - serialized_end=1815, + serialized_start=1751, + serialized_end=1888, ) _JOB_PROCESSINGOPTIONS = _descriptor.Descriptor( @@ -224,8 +226,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1818, - serialized_end=1954, + serialized_start=1891, + serialized_end=2027, ) _JOB_CUSTOMATTRIBUTESENTRY = _descriptor.Descriptor( @@ -280,8 +282,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1956, - serialized_end=2057, + serialized_start=2029, + serialized_end=2130, ) _JOB = _descriptor.Descriptor( @@ -324,7 +326,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -342,7 +344,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -360,7 +362,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -378,7 +380,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -756,7 +758,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -774,7 +776,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ 
-792,7 +794,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -810,7 +812,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -845,8 +847,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=222, - serialized_end=2057, + serialized_start=255, + serialized_end=2130, ) _JOB_APPLICATIONINFO.containing_type = _JOB @@ -940,21 +942,20 @@ Attributes: emails: - Optional. Use this field to specify email address(es) to which - resumes or applications can be sent. The maximum number of - allowed characters for each entry is 255. + Use this field to specify email address(es) to which resumes + or applications can be sent. The maximum number of allowed + characters for each entry is 255. instruction: - Optional. Use this field to provide instructions, such as - "Mail your application to ...", that a candidate can follow to - apply for the job. This field accepts and sanitizes HTML - input, and also accepts bold, italic, ordered list, and - unordered list markup tags. The maximum number of allowed - characters is 3,000. + Use this field to provide instructions, such as "Mail your + application to ...", that a candidate can follow to apply for + the job. This field accepts and sanitizes HTML input, and + also accepts bold, italic, ordered list, and unordered list + markup tags. The maximum number of allowed characters is + 3,000. uris: - Optional. Use this URI field to direct an applicant to a - website, for example to link to an online application form. - The maximum number of allowed characters for each entry is - 2,000. + Use this URI field to direct an applicant to a website, for + example to link to an online application form. 
The maximum + number of allowed characters for each entry is 2,000. """, # @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.Job.ApplicationInfo) ), @@ -965,9 +966,7 @@ dict( DESCRIPTOR=_JOB_DERIVEDINFO, __module__="google.cloud.talent_v4beta1.proto.job_pb2", - __doc__="""Output only. - - Derived details about the job posting. + __doc__="""Derived details about the job posting. Attributes: @@ -992,23 +991,20 @@ dict( DESCRIPTOR=_JOB_PROCESSINGOPTIONS, __module__="google.cloud.talent_v4beta1.proto.job_pb2", - __doc__="""Input only. - - Options for job processing. + __doc__="""Options for job processing. Attributes: disable_street_address_resolution: - Optional. If set to ``true``, the service does not attempt to - resolve a more precise address for the job. + If set to ``true``, the service does not attempt to resolve a + more precise address for the job. html_sanitization: - Optional. Option for job HTML content sanitization. Applied - fields are: - description - applicationInfo.instruction - - incentives - qualifications - responsibilities HTML tags in - these fields may be stripped if sanitiazation isn't disabled. - Defaults to [HtmlSanitization.SIMPLE\_FORMATTING\_ONLY][google - .cloud.talent.v4beta1.HtmlSanitization.SIMPLE\_FORMATTING\_ONL - Y]. + Option for job HTML content sanitization. Applied fields are: + - description - applicationInfo.instruction - incentives - + qualifications - responsibilities HTML tags in these fields + may be stripped if sanitiazation isn't disabled. Defaults to + [HtmlSanitization.SIMPLE\_FORMATTING\_ONLY][google.cloud.talen + t.v4beta1.HtmlSanitization.SIMPLE\_FORMATTING\_ONLY]. """, # @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.Job.ProcessingOptions) ), @@ -1035,21 +1031,20 @@ Required during job update. The resource name for the job. This is generated by the service when a job is created. 
The format is - "projects/{project\_id}/tenants/{tenant\_id}/jobs/{job\_id}", - for example, "projects/api-test- - project/tenants/foo/jobs/1234". Tenant id is optional and the - default tenant is used if unspecified, for example, - "projects/api-test-project/jobs/1234". Use of this field in - job queries and API calls is preferred over the use of [requis - ition\_id][google.cloud.talent.v4beta1.Job.requisition\_id] - since this value is unique. + "projects/{project\_id}/tenants/{tenant\_id}/jobs/{job\_id}". + For example, "projects/foo/tenants/bar/jobs/baz". If tenant + id is unspecified, the default tenant is used. For example, + "projects/foo/jobs/bar". Use of this field in job queries and + API calls is preferred over the use of [requisition\_id][googl + e.cloud.talent.v4beta1.Job.requisition\_id] since this value + is unique. company: Required. The resource name of the company listing the job. The format is "projects/{project\_id}/tenants/{tenant\_id}/com - panies/{company\_id}", for example, "projects/api-test- - project/tenants/foo/companies/bar". Tenant id is optional and - the default tenant is used if unspecified, for example, - "projects/api-test-project/companies/bar". + panies/{company\_id}". For example, + "projects/foo/tenants/bar/companies/baz". If tenant id is + unspecified, the default tenant is used. For example, + "projects/foo/companies/bar". requisition_id: Required. The requisition ID, also referred to as the posting ID, is assigned by the client to identify a job. This field is @@ -1075,16 +1070,16 @@ list, and unordered list markup tags. The maximum number of allowed characters is 100,000. addresses: - Optional but strongly recommended for the best service - experience. Location(s) where the employer is looking to hire - for this job posting. Specifying the full street address(es) - of the hiring location enables better API results, especially - job searches by commute time. At most 50 locations are - allowed for best search performance. 
If a job has more - locations, it is suggested to split it into multiple jobs with - unique [requisition\_id][google.cloud.talent.v4beta1.Job.requi - sition\_id]s (e.g. 'ReqA' becomes 'ReqA-1', 'ReqA-2', and so - on.) as multiple jobs with the same + Strongly recommended for the best service experience. + Location(s) where the employer is looking to hire for this job + posting. Specifying the full street address(es) of the hiring + location enables better API results, especially job searches + by commute time. At most 50 locations are allowed for best + search performance. If a job has more locations, it is + suggested to split it into multiple jobs with unique [requisit + ion\_id][google.cloud.talent.v4beta1.Job.requisition\_id]s + (e.g. 'ReqA' becomes 'ReqA-1', 'ReqA-2', and so on.) as + multiple jobs with the same [company][google.cloud.talent.v4beta1.Job.company], [language\ _code][google.cloud.talent.v4beta1.Job.language\_code] and [re quisition\_id][google.cloud.talent.v4beta1.Job.requisition\_id @@ -1095,46 +1090,44 @@ job for better search experience. The maximum number of allowed characters is 500. application_info: - Optional. Job application information. + Job application information. job_benefits: - Optional. The benefits included with the job. + The benefits included with the job. compensation_info: - Optional. Job compensation information (a.k.a. "pay rate") - i.e., the compensation that will paid to the employee. + Job compensation information (a.k.a. "pay rate") i.e., the + compensation that will paid to the employee. custom_attributes: - Optional. A map of fields to hold both filterable and non- - filterable custom job attributes that are not covered by the - provided structured fields. The keys of the map are strings - up to 64 bytes and must match the pattern: - [a-zA-Z][a-zA-Z0-9\_]\*. For example, key0LikeThis or - KEY\_1\_LIKE\_THIS. At most 100 filterable and at most 100 - unfilterable keys are supported. 
For filterable - ``string_values``, across all keys at most 200 values are - allowed, with each string no more than 255 characters. For - unfilterable ``string_values``, the maximum total size of - ``string_values`` across all keys is 50KB. + A map of fields to hold both filterable and non-filterable + custom job attributes that are not covered by the provided + structured fields. The keys of the map are strings up to 64 + bytes and must match the pattern: [a-zA-Z][a-zA-Z0-9\_]\*. For + example, key0LikeThis or KEY\_1\_LIKE\_THIS. At most 100 + filterable and at most 100 unfilterable keys are supported. + For filterable ``string_values``, across all keys at most 200 + values are allowed, with each string no more than 255 + characters. For unfilterable ``string_values``, the maximum + total size of ``string_values`` across all keys is 50KB. degree_types: - Optional. The desired education degrees for the job, such as - Bachelors, Masters. + The desired education degrees for the job, such as Bachelors, + Masters. department: - Optional. The department or functional area within the company - with the open position. The maximum number of allowed - characters is 255. + The department or functional area within the company with the + open position. The maximum number of allowed characters is + 255. employment_types: - Optional. The employment type(s) of a job, for example, [full + The employment type(s) of a job, for example, [full time][google.cloud.talent.v4beta1.EmploymentType.FULL\_TIME] or [part time][google.cloud.talent.v4beta1.EmploymentType.PART\_TIME]. incentives: - Optional. A description of bonus, commission, and other - compensation incentives associated with the job not including - salary or pay. The maximum number of allowed characters is - 10,000. + A description of bonus, commission, and other compensation + incentives associated with the job not including salary or + pay. The maximum number of allowed characters is 10,000. language_code: - Optional. 
The language of the posting. This field is distinct - from any requirements for fluency that are associated with the - job. Language codes must be in BCP-47 format, such as "en-US" - or "sr-Latn". For more information, see `Tags for Identifying + The language of the posting. This field is distinct from any + requirements for fluency that are associated with the job. + Language codes must be in BCP-47 format, such as "en-US" or + "sr-Latn". For more information, see `Tags for Identifying Languages `__\ {: class="external" target="\_blank" }. If this field is unspecified and @@ -1143,35 +1136,33 @@ [Job.description][google.cloud.talent.v4beta1.Job.description] is assigned, otherwise defaults to 'en\_US'. job_level: - Optional. The experience level associated with the job, such - as "Entry Level". + The experience level associated with the job, such as "Entry + Level". promotion_value: - Optional. A promotion value of the job, as determined by the - client. The value determines the sort order of the jobs - returned when searching for jobs using the featured jobs - search call, with higher promotional values being returned - first and ties being resolved by relevance sort. Only the jobs - with a promotionValue >0 are returned in a - FEATURED\_JOB\_SEARCH. Default value is 0, and negative - values are treated as 0. + A promotion value of the job, as determined by the client. The + value determines the sort order of the jobs returned when + searching for jobs using the featured jobs search call, with + higher promotional values being returned first and ties being + resolved by relevance sort. Only the jobs with a + promotionValue >0 are returned in a FEATURED\_JOB\_SEARCH. + Default value is 0, and negative values are treated as 0. qualifications: - Optional. A description of the qualifications required to - perform the job. 
The use of this field is recommended as an - alternative to using the more general + A description of the qualifications required to perform the + job. The use of this field is recommended as an alternative to + using the more general [description][google.cloud.talent.v4beta1.Job.description] field. This field accepts and sanitizes HTML input, and also accepts bold, italic, ordered list, and unordered list markup tags. The maximum number of allowed characters is 10,000. responsibilities: - Optional. A description of job responsibilities. The use of - this field is recommended as an alternative to using the more - general + A description of job responsibilities. The use of this field + is recommended as an alternative to using the more general [description][google.cloud.talent.v4beta1.Job.description] field. This field accepts and sanitizes HTML input, and also accepts bold, italic, ordered list, and unordered list markup tags. The maximum number of allowed characters is 10,000. posting_region: - Optional. The job + The job [PostingRegion][google.cloud.talent.v4beta1.PostingRegion] (for example, state, country) throughout which the job is available. If this field is set, a @@ -1190,23 +1181,21 @@ [google.cloud.talent.v4beta1.Visibility.ACCOUNT\_ONLY] if not specified. job_start_time: - Optional. The start timestamp of the job in UTC time zone. - Typically this field is used for contracting engagements. - Invalid timestamps are ignored. + The start timestamp of the job in UTC time zone. Typically + this field is used for contracting engagements. Invalid + timestamps are ignored. job_end_time: - Optional. The end timestamp of the job. Typically this field - is used for contracting engagements. Invalid timestamps are - ignored. + The end timestamp of the job. Typically this field is used for + contracting engagements. Invalid timestamps are ignored. posting_publish_time: - Optional. The timestamp this job posting was most recently - published. 
The default value is the time the request arrives - at the server. Invalid timestamps are ignored. + The timestamp this job posting was most recently published. + The default value is the time the request arrives at the + server. Invalid timestamps are ignored. posting_expire_time: - Optional but strongly recommended for the best service - experience. The expiration timestamp of the job. After this - timestamp, the job is marked as expired, and it no longer - appears in search results. The expired job can't be listed by - the + Strongly recommended for the best service experience. The + expiration timestamp of the job. After this timestamp, the job + is marked as expired, and it no longer appears in search + results. The expired job can't be listed by the [ListJobs][google.cloud.talent.v4beta1.JobService.ListJobs] API, but it can be retrieved with the [GetJob][google.cloud.talent.v4beta1.JobService.GetJob] API or @@ -1260,7 +1249,7 @@ derived_info: Output only. Derived details about the job posting. processing_options: - Optional. Options for job processing. + Options for job processing. 
""", # @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.Job) ), @@ -1274,5 +1263,13 @@ DESCRIPTOR._options = None _JOB_CUSTOMATTRIBUTESENTRY._options = None +_JOB.fields_by_name["company"]._options = None +_JOB.fields_by_name["requisition_id"]._options = None +_JOB.fields_by_name["title"]._options = None +_JOB.fields_by_name["description"]._options = None _JOB.fields_by_name["visibility"]._options = None +_JOB.fields_by_name["posting_create_time"]._options = None +_JOB.fields_by_name["posting_update_time"]._options = None +_JOB.fields_by_name["company_display_name"]._options = None +_JOB.fields_by_name["derived_info"]._options = None # @@protoc_insertion_point(module_scope) diff --git a/talent/google/cloud/talent_v4beta1/proto/job_service.proto b/talent/google/cloud/talent_v4beta1/proto/job_service.proto index ddec147d8410..53afa2377536 100644 --- a/talent/google/cloud/talent_v4beta1/proto/job_service.proto +++ b/talent/google/cloud/talent_v4beta1/proto/job_service.proto @@ -19,6 +19,7 @@ package google.cloud.talent.v4beta1; import "google/api/annotations.proto"; import "google/api/client.proto"; +import "google/api/field_behavior.proto"; import "google/cloud/talent/v4beta1/common.proto"; import "google/cloud/talent/v4beta1/filters.proto"; import "google/cloud/talent/v4beta1/histogram.proto"; @@ -58,6 +59,19 @@ service JobService { }; } + // Begins executing a batch create jobs operation. + rpc BatchCreateJobs(BatchCreateJobsRequest) + returns (google.longrunning.Operation) { + option (google.api.http) = { + post: "/v4beta1/{parent=projects/*/tenants/*}/jobs:batchCreate" + body: "*" + additional_bindings { + post: "/v4beta1/{parent=projects/*}/jobs:batchCreate" + body: "*" + } + }; + } + // Retrieves the specified job, whose status is OPEN or recently EXPIRED // within the last 90 days. rpc GetJob(GetJobRequest) returns (Job) { @@ -82,6 +96,19 @@ service JobService { }; } + // Begins executing a batch update jobs operation. 
+ rpc BatchUpdateJobs(BatchUpdateJobsRequest) + returns (google.longrunning.Operation) { + option (google.api.http) = { + post: "/v4beta1/{parent=projects/*/tenants/*}/jobs:batchUpdate" + body: "*" + additional_bindings { + post: "/v4beta1/{parent=projects/*}/jobs:batchUpdate" + body: "*" + } + }; + } + // Deletes the specified job. // // Typically, the job becomes unsearchable within 10 seconds, but it may take @@ -93,14 +120,6 @@ service JobService { }; } - // Lists jobs by filter. - rpc ListJobs(ListJobsRequest) returns (ListJobsResponse) { - option (google.api.http) = { - get: "/v4beta1/{parent=projects/*/tenants/*}/jobs" - additional_bindings { get: "/v4beta1/{parent=projects/*}/jobs" } - }; - } - // Deletes a list of [Job][google.cloud.talent.v4beta1.Job]s by filter. rpc BatchDeleteJobs(BatchDeleteJobsRequest) returns (google.protobuf.Empty) { option (google.api.http) = { @@ -112,6 +131,14 @@ service JobService { }; } + // Lists jobs by filter. + rpc ListJobs(ListJobsRequest) returns (ListJobsResponse) { + option (google.api.http) = { + get: "/v4beta1/{parent=projects/*/tenants/*}/jobs" + additional_bindings { get: "/v4beta1/{parent=projects/*}/jobs" } + }; + } + // Searches for jobs using the provided // [SearchJobsRequest][google.cloud.talent.v4beta1.SearchJobsRequest]. // @@ -152,75 +179,40 @@ service JobService { } }; } - - // Begins executing a batch create jobs operation. - rpc BatchCreateJobs(BatchCreateJobsRequest) - returns (google.longrunning.Operation) { - option (google.api.http) = { - post: "/v4beta1/{parent=projects/*/tenants/*}/jobs:batchCreate" - body: "*" - additional_bindings { - post: "/v4beta1/{parent=projects/*}/jobs:batchCreate" - body: "*" - } - }; - } - - // Begins executing a batch update jobs operation. 
- rpc BatchUpdateJobs(BatchUpdateJobsRequest) - returns (google.longrunning.Operation) { - option (google.api.http) = { - post: "/v4beta1/{parent=projects/*/tenants/*}/jobs:batchUpdate" - body: "*" - additional_bindings { - post: "/v4beta1/{parent=projects/*}/jobs:batchUpdate" - body: "*" - } - }; - } } -// Input only. -// // Create job request. message CreateJobRequest { // Required. The resource name of the tenant under which the job is created. // - // The format is "projects/{project_id}/tenants/{tenant_id}", for example, - // "projects/api-test-project/tenant/foo". - // - // Tenant id is optional and a default tenant is created if unspecified, for - // example, "projects/api-test-project". - string parent = 1; + // The format is "projects/{project_id}/tenants/{tenant_id}". For example, + // "projects/foo/tenant/bar". If tenant id is unspecified a default tenant + // is created. For example, "projects/foo". + string parent = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The Job to be created. - Job job = 2; + Job job = 2 [(google.api.field_behavior) = REQUIRED]; } -// Input only. -// // Get job request. message GetJobRequest { // Required. The resource name of the job to retrieve. // // The format is - // "projects/{project_id}/tenants/{tenant_id}/jobs/{job_id}", for - // example, "projects/api-test-project/tenants/foo/jobs/1234". + // "projects/{project_id}/tenants/{tenant_id}/jobs/{job_id}". For + // example, "projects/foo/tenants/bar/jobs/baz". // - // Tenant id is optional and the default tenant is used if unspecified, for - // example, "projects/api-test-project/jobs/1234". - string name = 1; + // If tenant id is unspecified, the default tenant is used. For + // example, "projects/foo/jobs/bar". + string name = 1 [(google.api.field_behavior) = REQUIRED]; } -// Input only. -// // Update job request. message UpdateJobRequest { // Required. The Job to be updated. 
- Job job = 1; + Job job = 1 [(google.api.field_behavior) = REQUIRED]; - // Optional but strongly recommended to be provided for the best service - // experience. + // Strongly recommended for the best service experience. // // If [update_mask][google.cloud.talent.v4beta1.UpdateJobRequest.update_mask] // is provided, only the specified fields in @@ -232,33 +224,27 @@ message UpdateJobRequest { google.protobuf.FieldMask update_mask = 2; } -// Input only. -// // Delete job request. message DeleteJobRequest { // Required. The resource name of the job to be deleted. // // The format is - // "projects/{project_id}/tenants/{tenant_id}/jobs/{job_id}", for - // example, "projects/api-test-project/tenants/foo/jobs/1234". + // "projects/{project_id}/tenants/{tenant_id}/jobs/{job_id}". For + // example, "projects/foo/tenants/bar/jobs/baz". // - // Tenant id is optional and the default tenant is used if unspecified, for - // example, "projects/api-test-project/jobs/1234". - string name = 1; + // If tenant id is unspecified, the default tenant is used. For + // example, "projects/foo/jobs/bar". + string name = 1 [(google.api.field_behavior) = REQUIRED]; } -// Input only. -// // Batch delete jobs request. message BatchDeleteJobsRequest { // Required. The resource name of the tenant under which the job is created. // - // The format is "projects/{project_id}/tenants/{tenant_id}", for example, - // "projects/api-test-project/tenant/foo". - // - // Tenant id is optional and the default tenant is used if unspecified, for - // example, "projects/api-test-project". - string parent = 1; + // The format is "projects/{project_id}/tenants/{tenant_id}". For example, + // "projects/foo/tenant/bar". If tenant id is unspecified, a default tenant + // is created. For example, "projects/foo". + string parent = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The filter string specifies the jobs to be deleted. 
// @@ -269,23 +255,19 @@ message BatchDeleteJobsRequest { // * `companyName` (Required) // * `requisitionId` (Required) // - // Sample Query: companyName = "projects/api-test-project/companies/123" AND + // Sample Query: companyName = "projects/foo/companies/bar" AND // requisitionId = "req-1" - string filter = 2; + string filter = 2 [(google.api.field_behavior) = REQUIRED]; } -// Input only. -// // List jobs request. message ListJobsRequest { // Required. The resource name of the tenant under which the job is created. // - // The format is "projects/{project_id}/tenants/{tenant_id}", for example, - // "projects/api-test-project/tenant/foo". - // - // Tenant id is optional and the default tenant is used if unspecified, for - // example, "projects/api-test-project". - string parent = 1; + // The format is "projects/{project_id}/tenants/{tenant_id}". For example, + // "projects/foo/tenant/bar". If tenant id is unspecified, a default tenant + // is created. For example, "projects/foo". + string parent = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The filter string specifies the jobs to be enumerated. // @@ -294,23 +276,23 @@ message ListJobsRequest { // The fields eligible for filtering are: // // * `companyName` (Required) - // * `requisitionId` (Optional) - // * `status` (Optional) Available values: OPEN, EXPIRED, ALL. Defaults to + // * `requisitionId` + // * `status` Available values: OPEN, EXPIRED, ALL. Defaults to // OPEN if no value is specified. 
// // Sample Query: // - // * companyName = "projects/api-test-project/tenants/foo/companies/bar" - // * companyName = "projects/api-test-project/tenants/foo/companies/bar" AND + // * companyName = "projects/foo/tenants/bar/companies/baz" + // * companyName = "projects/foo/tenants/bar/companies/baz" AND // requisitionId = "req-1" - // * companyName = "projects/api-test-project/tenants/foo/companies/bar" AND + // * companyName = "projects/foo/tenants/bar/companies/baz" AND // status = "EXPIRED" - string filter = 2; + string filter = 2 [(google.api.field_behavior) = REQUIRED]; - // Optional. The starting point of a query result. + // The starting point of a query result. string page_token = 3; - // Optional. The maximum number of jobs to be returned per page of results. + // The maximum number of jobs to be returned per page of results. // // If [job_view][google.cloud.talent.v4beta1.ListJobsRequest.job_view] is set // to @@ -321,7 +303,7 @@ message ListJobsRequest { // Default is 100 if empty or a number < 1 is specified. int32 page_size = 4; - // Optional. The desired job attributes returned for jobs in the + // The desired job attributes returned for jobs in the // search response. Defaults to // [JobView.JOB_VIEW_FULL][google.cloud.talent.v4beta1.JobView.JOB_VIEW_FULL] // if no value is specified. @@ -366,8 +348,6 @@ enum JobView { JOB_VIEW_FULL = 4; } -// Output only. -// // List jobs response. message ListJobsResponse { // The Jobs for a given company. @@ -384,12 +364,8 @@ message ListJobsResponse { ResponseMetadata metadata = 3; } -// Input only. -// // The Request body of the `SearchJobs` call. message SearchJobsRequest { - // Input only. - // // Custom ranking information for // [SearchJobsRequest][google.cloud.talent.v4beta1.SearchJobsRequest]. message CustomRankingInfo { @@ -435,7 +411,8 @@ message SearchJobsRequest { // gets applied to job's final ranking position. // // An error is thrown if not specified. 
- ImportanceLevel importance_level = 1; + ImportanceLevel importance_level = 1 + [(google.api.field_behavior) = REQUIRED]; // Required. Controls over how job documents get ranked on top of existing // relevance score (determined by API algorithm). The product of ranking @@ -455,13 +432,14 @@ message SearchJobsRequest { // // Sample ranking expression // (year + 25) * 0.25 - (freshness / 0.5) - string ranking_expression = 2; + string ranking_expression = 2 [(google.api.field_behavior) = REQUIRED]; } // A string-represented enumeration of the job search mode. The service // operate differently for different modes of service. enum SearchMode { - // The mode of the search method isn't specified. + // The mode of the search method isn't specified. The default search + // behavior is identical to JOB_SEARCH search behavior. SEARCH_MODE_UNSPECIFIED = 0; // The job search matches against all jobs, and featured jobs @@ -499,14 +477,12 @@ message SearchJobsRequest { // Required. The resource name of the tenant to search within. // - // The format is "projects/{project_id}/tenants/{tenant_id}", for example, - // "projects/api-test-project/tenant/foo". - // - // Tenant id is optional and the default tenant is used if unspecified, for - // example, "projects/api-test-project". - string parent = 1; + // The format is "projects/{project_id}/tenants/{tenant_id}". For example, + // "projects/foo/tenant/bar". If tenant id is unspecified, a default tenant + // is created. For example, "projects/foo". + string parent = 1 [(google.api.field_behavior) = REQUIRED]; - // Optional. Mode of a search. + // Mode of a search. // // Defaults to // [SearchMode.JOB_SEARCH][google.cloud.talent.v4beta1.SearchJobsRequest.SearchMode.JOB_SEARCH]. @@ -515,21 +491,20 @@ message SearchJobsRequest { // Required. The meta information collected about the job searcher, used to // improve the search quality of the service. 
The identifiers (such as // `user_id`) are provided by users, and must be unique and consistent. - RequestMetadata request_metadata = 3; + RequestMetadata request_metadata = 3 [(google.api.field_behavior) = REQUIRED]; - // Optional. Query used to search against jobs, such as keyword, location - // filters, etc. + // Query used to search against jobs, such as keyword, location filters, etc. JobQuery job_query = 4; - // Optional. Controls whether to broaden the search when it produces sparse - // results. Broadened queries append results to the end of the matching - // results list. + // Controls whether to broaden the search when it produces sparse results. + // Broadened queries append results to the end of the matching results + // list. // // Defaults to false. bool enable_broadening = 5; - // Optional. Controls if the search job request requires the return of a - // precise count of the first 300 results. Setting this to `true` ensures + // Controls if the search job request requires the return of a precise + // count of the first 300 results. Setting this to `true` ensures // consistency in the number of results per page. Best practice is to set this // value to true if a client allows users to jump directly to a // non-sequential search results page. @@ -539,8 +514,7 @@ message SearchJobsRequest { // Defaults to false. bool require_precise_result_size = 6; - // Optional. An expression specifies a histogram request against matching - // jobs. + // An expression specifies a histogram request against matching jobs. // // Expression syntax is an aggregation function call with histogram facets and // other options. @@ -649,14 +623,14 @@ message SearchJobsRequest { // [bucket(MIN, 0, "negative"), bucket(0, MAX, "non-negative"])` repeated HistogramQuery histogram_queries = 7; - // Optional. The desired job attributes returned for jobs in the search - // response. Defaults to + // The desired job attributes returned for jobs in the search response. 
+ // Defaults to // [JobView.JOB_VIEW_SMALL][google.cloud.talent.v4beta1.JobView.JOB_VIEW_SMALL] // if no value is specified. JobView job_view = 8; - // Optional. An integer that specifies the current offset (that is, starting - // result location, amongst the jobs deemed by the API as relevant) in search + // An integer that specifies the current offset (that is, starting result + // location, amongst the jobs deemed by the API as relevant) in search // results. This field is only considered if // [page_token][google.cloud.talent.v4beta1.SearchJobsRequest.page_token] is // unset. @@ -667,19 +641,18 @@ message SearchJobsRequest { // from the second page). int32 offset = 9; - // Optional. A limit on the number of jobs returned in the search results. + // A limit on the number of jobs returned in the search results. // Increasing this value above the default value of 10 can increase search // response time. The value can be between 1 and 100. int32 page_size = 10; - // Optional. The token specifying the current offset within + // The token specifying the current offset within // search results. See // [SearchJobsResponse.next_page_token][google.cloud.talent.v4beta1.SearchJobsResponse.next_page_token] // for an explanation of how to obtain the next set of query results. string page_token = 11; - // Optional. The criteria determining how search results are sorted. Default - // is + // The criteria determining how search results are sorted. Default is // `"relevance desc"`. // // Supported options are: @@ -735,8 +708,8 @@ message SearchJobsRequest { // [diversification_level][google.cloud.talent.v4beta1.SearchJobsRequest.diversification_level]. string order_by = 12; - // Optional. Controls whether highly similar jobs are returned next to each - // other in the search results. Jobs are identified as highly similar based on + // Controls whether highly similar jobs are returned next to each other in + // the search results. 
Jobs are identified as highly similar based on // their titles, job categories, and locations. Highly similar results are // clustered so that only one representative job of the cluster is // displayed to the job seeker higher up in the results, with the other jobs @@ -747,11 +720,11 @@ message SearchJobsRequest { // if no value is specified. DiversificationLevel diversification_level = 13; - // Optional. Controls over how job documents get ranked on top of existing - // relevance score (determined by API algorithm). + // Controls over how job documents get ranked on top of existing relevance + // score (determined by API algorithm). CustomRankingInfo custom_ranking_info = 14; - // Optional. Controls whether to disable exact keyword match on + // Controls whether to disable exact keyword match on // [Job.title][google.cloud.talent.v4beta1.Job.title], // [Job.description][google.cloud.talent.v4beta1.Job.description], // [Job.company_display_name][google.cloud.talent.v4beta1.Job.company_display_name], @@ -778,12 +751,8 @@ message SearchJobsRequest { bool disable_keyword_match = 16; } -// Output only. -// // Response for SearchJob method. message SearchJobsResponse { - // Output only. - // // Job entry with metadata inside // [SearchJobsResponse][google.cloud.talent.v4beta1.SearchJobsResponse]. message MatchingJob { @@ -814,8 +783,6 @@ message SearchJobsResponse { CommuteInfo commute_info = 5; } - // Output only. - // // Commute details related to this job. message CommuteInfo { // Location used as the destination in the commute calculation. @@ -881,33 +848,29 @@ message SearchJobsResponse { message BatchCreateJobsRequest { // Required. The resource name of the tenant under which the job is created. // - // The format is "projects/{project_id}/tenants/{tenant_id}", for example, - // "projects/api-test-project/tenant/foo". - // - // Tenant id is optional and a default tenant is created if unspecified, for - // example, "projects/api-test-project". 
- string parent = 1; + // The format is "projects/{project_id}/tenants/{tenant_id}". For example, + // "projects/foo/tenant/bar". If tenant id is unspecified, a default tenant + // is created. For example, "projects/foo". + string parent = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The jobs to be created. - repeated Job jobs = 2; + repeated Job jobs = 2 [(google.api.field_behavior) = REQUIRED]; } // Request to update a batch of jobs. message BatchUpdateJobsRequest { // Required. The resource name of the tenant under which the job is created. // - // The format is "projects/{project_id}/tenants/{tenant_id}", for example, - // "projects/api-test-project/tenant/foo". - // - // Tenant id is optional and the default tenant is used if unspecified, for - // example, "projects/api-test-project". - string parent = 1; + // The format is "projects/{project_id}/tenants/{tenant_id}". For example, + // "projects/foo/tenant/bar". If tenant id is unspecified, a default tenant + // is created. For example, "projects/foo". + string parent = 1 [(google.api.field_behavior) = REQUIRED]; - // Required. The jobs to be updated. + // The jobs to be updated. repeated Job jobs = 2; - // Optional but strongly recommended to be provided for the best service - // experience, also increase latency when checking status of batch operation. + // Strongly recommended for the best service experience. Be aware that it will + // also increase latency when checking the status of a batch operation. // // If // [update_mask][google.cloud.talent.v4beta1.BatchUpdateJobsRequest.update_mask] @@ -928,8 +891,6 @@ message BatchUpdateJobsRequest { google.protobuf.FieldMask update_mask = 3; } -// Output only. 
-// // The result of // [JobService.BatchCreateJobs][google.cloud.talent.v4beta1.JobService.BatchCreateJobs] // or diff --git a/talent/google/cloud/talent_v4beta1/proto/job_service_pb2.py b/talent/google/cloud/talent_v4beta1/proto/job_service_pb2.py index b96c8d7aba09..f9d43faf6b19 100644 --- a/talent/google/cloud/talent_v4beta1/proto/job_service_pb2.py +++ b/talent/google/cloud/talent_v4beta1/proto/job_service_pb2.py @@ -18,6 +18,7 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.cloud.talent_v4beta1.proto import ( common_pb2 as google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_common__pb2, ) @@ -48,11 +49,12 @@ "\n\037com.google.cloud.talent.v4beta1B\017JobServiceProtoP\001ZAgoogle.golang.org/genproto/googleapis/cloud/talent/v4beta1;talent\242\002\003CTS" ), serialized_pb=_b( - '\n3google/cloud/talent_v4beta1/proto/job_service.proto\x12\x1bgoogle.cloud.talent.v4beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a.google/cloud/talent_v4beta1/proto/common.proto\x1a/google/cloud/talent_v4beta1/proto/filters.proto\x1a\x31google/cloud/talent_v4beta1/proto/histogram.proto\x1a+google/cloud/talent_v4beta1/proto/job.proto\x1a#google/longrunning/operations.proto\x1a\x19google/protobuf/any.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x17google/rpc/status.proto"Q\n\x10\x43reateJobRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12-\n\x03job\x18\x02 \x01(\x0b\x32 .google.cloud.talent.v4beta1.Job"\x1d\n\rGetJobRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"r\n\x10UpdateJobRequest\x12-\n\x03job\x18\x01 \x01(\x0b\x32 .google.cloud.talent.v4beta1.Job\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask" \n\x10\x44\x65leteJobRequest\x12\x0c\n\x04name\x18\x01 
\x01(\t"8\n\x16\x42\x61tchDeleteJobsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t"\x90\x01\n\x0fListJobsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x12\n\npage_token\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05\x12\x36\n\x08job_view\x18\x05 \x01(\x0e\x32$.google.cloud.talent.v4beta1.JobView"\x9c\x01\n\x10ListJobsResponse\x12.\n\x04jobs\x18\x01 \x03(\x0b\x32 .google.cloud.talent.v4beta1.Job\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\x12?\n\x08metadata\x18\x03 \x01(\x0b\x32-.google.cloud.talent.v4beta1.ResponseMetadata"\xa0\t\n\x11SearchJobsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12N\n\x0bsearch_mode\x18\x02 \x01(\x0e\x32\x39.google.cloud.talent.v4beta1.SearchJobsRequest.SearchMode\x12\x46\n\x10request_metadata\x18\x03 \x01(\x0b\x32,.google.cloud.talent.v4beta1.RequestMetadata\x12\x38\n\tjob_query\x18\x04 \x01(\x0b\x32%.google.cloud.talent.v4beta1.JobQuery\x12\x19\n\x11\x65nable_broadening\x18\x05 \x01(\x08\x12#\n\x1brequire_precise_result_size\x18\x06 \x01(\x08\x12\x46\n\x11histogram_queries\x18\x07 \x03(\x0b\x32+.google.cloud.talent.v4beta1.HistogramQuery\x12\x36\n\x08job_view\x18\x08 \x01(\x0e\x32$.google.cloud.talent.v4beta1.JobView\x12\x0e\n\x06offset\x18\t \x01(\x05\x12\x11\n\tpage_size\x18\n \x01(\x05\x12\x12\n\npage_token\x18\x0b \x01(\t\x12\x10\n\x08order_by\x18\x0c \x01(\t\x12\x62\n\x15\x64iversification_level\x18\r \x01(\x0e\x32\x43.google.cloud.talent.v4beta1.SearchJobsRequest.DiversificationLevel\x12]\n\x13\x63ustom_ranking_info\x18\x0e \x01(\x0b\x32@.google.cloud.talent.v4beta1.SearchJobsRequest.CustomRankingInfo\x12\x1d\n\x15\x64isable_keyword_match\x18\x10 \x01(\x08\x1a\x90\x02\n\x11\x43ustomRankingInfo\x12j\n\x10importance_level\x18\x01 \x01(\x0e\x32P.google.cloud.talent.v4beta1.SearchJobsRequest.CustomRankingInfo.ImportanceLevel\x12\x1a\n\x12ranking_expression\x18\x02 \x01(\t"s\n\x0fImportanceLevel\x12 
\n\x1cIMPORTANCE_LEVEL_UNSPECIFIED\x10\x00\x12\x08\n\x04NONE\x10\x01\x12\x07\n\x03LOW\x10\x02\x12\x08\n\x04MILD\x10\x03\x12\n\n\x06MEDIUM\x10\x04\x12\x08\n\x04HIGH\x10\x05\x12\x0b\n\x07\x45XTREME\x10\x06"R\n\nSearchMode\x12\x1b\n\x17SEARCH_MODE_UNSPECIFIED\x10\x00\x12\x0e\n\nJOB_SEARCH\x10\x01\x12\x17\n\x13\x46\x45\x41TURED_JOB_SEARCH\x10\x02"W\n\x14\x44iversificationLevel\x12%\n!DIVERSIFICATION_LEVEL_UNSPECIFIED\x10\x00\x12\x0c\n\x08\x44ISABLED\x10\x01\x12\n\n\x06SIMPLE\x10\x02"\xd7\x06\n\x12SearchJobsResponse\x12R\n\rmatching_jobs\x18\x01 \x03(\x0b\x32;.google.cloud.talent.v4beta1.SearchJobsResponse.MatchingJob\x12R\n\x17histogram_query_results\x18\x02 \x03(\x0b\x32\x31.google.cloud.talent.v4beta1.HistogramQueryResult\x12\x17\n\x0fnext_page_token\x18\x03 \x01(\t\x12?\n\x10location_filters\x18\x04 \x03(\x0b\x32%.google.cloud.talent.v4beta1.Location\x12\x1c\n\x14\x65stimated_total_size\x18\x05 \x01(\x05\x12\x12\n\ntotal_size\x18\x06 \x01(\x05\x12?\n\x08metadata\x18\x07 \x01(\x0b\x32-.google.cloud.talent.v4beta1.ResponseMetadata\x12"\n\x1a\x62roadened_query_jobs_count\x18\x08 \x01(\x05\x12I\n\x10spell_correction\x18\t \x01(\x0b\x32/.google.cloud.talent.v4beta1.SpellingCorrection\x1a\xdc\x01\n\x0bMatchingJob\x12-\n\x03job\x18\x01 \x01(\x0b\x32 .google.cloud.talent.v4beta1.Job\x12\x13\n\x0bjob_summary\x18\x02 \x01(\t\x12\x19\n\x11job_title_snippet\x18\x03 \x01(\t\x12\x1b\n\x13search_text_snippet\x18\x04 \x01(\t\x12Q\n\x0c\x63ommute_info\x18\x05 \x01(\x0b\x32;.google.cloud.talent.v4beta1.SearchJobsResponse.CommuteInfo\x1a~\n\x0b\x43ommuteInfo\x12;\n\x0cjob_location\x18\x01 \x01(\x0b\x32%.google.cloud.talent.v4beta1.Location\x12\x32\n\x0ftravel_duration\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration"X\n\x16\x42\x61tchCreateJobsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12.\n\x04jobs\x18\x02 \x03(\x0b\x32 .google.cloud.talent.v4beta1.Job"\x89\x01\n\x16\x42\x61tchUpdateJobsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12.\n\x04jobs\x18\x02 \x03(\x0b\x32 
.google.cloud.talent.v4beta1.Job\x12/\n\x0bupdate_mask\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"\xc4\x01\n\x12JobOperationResult\x12N\n\x0bjob_results\x18\x01 \x03(\x0b\x32\x39.google.cloud.talent.v4beta1.JobOperationResult.JobResult\x1a^\n\tJobResult\x12-\n\x03job\x18\x01 \x01(\x0b\x32 .google.cloud.talent.v4beta1.Job\x12"\n\x06status\x18\x02 \x01(\x0b\x32\x12.google.rpc.Status*v\n\x07JobView\x12\x18\n\x14JOB_VIEW_UNSPECIFIED\x10\x00\x12\x14\n\x10JOB_VIEW_ID_ONLY\x10\x01\x12\x14\n\x10JOB_VIEW_MINIMAL\x10\x02\x12\x12\n\x0eJOB_VIEW_SMALL\x10\x03\x12\x11\n\rJOB_VIEW_FULL\x10\x04\x32\x90\x11\n\nJobService\x12\xbc\x01\n\tCreateJob\x12-.google.cloud.talent.v4beta1.CreateJobRequest\x1a .google.cloud.talent.v4beta1.Job"^\x82\xd3\xe4\x93\x02X"+/v4beta1/{parent=projects/*/tenants/*}/jobs:\x01*Z&"!/v4beta1/{parent=projects/*}/jobs:\x01*\x12\xb0\x01\n\x06GetJob\x12*.google.cloud.talent.v4beta1.GetJobRequest\x1a .google.cloud.talent.v4beta1.Job"X\x82\xd3\xe4\x93\x02R\x12+/v4beta1/{name=projects/*/tenants/*/jobs/*}Z#\x12!/v4beta1/{name=projects/*/jobs/*}\x12\xc4\x01\n\tUpdateJob\x12-.google.cloud.talent.v4beta1.UpdateJobRequest\x1a 
.google.cloud.talent.v4beta1.Job"f\x82\xd3\xe4\x93\x02`2//v4beta1/{job.name=projects/*/tenants/*/jobs/*}:\x01*Z*2%/v4beta1/{job.name=projects/*/jobs/*}:\x01*\x12\xac\x01\n\tDeleteJob\x12-.google.cloud.talent.v4beta1.DeleteJobRequest\x1a\x16.google.protobuf.Empty"X\x82\xd3\xe4\x93\x02R*+/v4beta1/{name=projects/*/tenants/*/jobs/*}Z#*!/v4beta1/{name=projects/*/jobs/*}\x12\xc1\x01\n\x08ListJobs\x12,.google.cloud.talent.v4beta1.ListJobsRequest\x1a-.google.cloud.talent.v4beta1.ListJobsResponse"X\x82\xd3\xe4\x93\x02R\x12+/v4beta1/{parent=projects/*/tenants/*}/jobsZ#\x12!/v4beta1/{parent=projects/*}/jobs\x12\xd3\x01\n\x0f\x42\x61tchDeleteJobs\x12\x33.google.cloud.talent.v4beta1.BatchDeleteJobsRequest\x1a\x16.google.protobuf.Empty"s\x82\xd3\xe4\x93\x02m"7/v4beta1/{parent=projects/*/tenants/*}/jobs:batchDelete:\x01*Z/"-/v4beta1/{parent=projects/*}/jobs:batchDelete\x12\xdb\x01\n\nSearchJobs\x12..google.cloud.talent.v4beta1.SearchJobsRequest\x1a/.google.cloud.talent.v4beta1.SearchJobsResponse"l\x82\xd3\xe4\x93\x02\x66"2/v4beta1/{parent=projects/*/tenants/*}/jobs:search:\x01*Z-"(/v4beta1/{parent=projects/*}/jobs:search:\x01*\x12\xf3\x01\n\x12SearchJobsForAlert\x12..google.cloud.talent.v4beta1.SearchJobsRequest\x1a/.google.cloud.talent.v4beta1.SearchJobsResponse"|\x82\xd3\xe4\x93\x02v":/v4beta1/{parent=projects/*/tenants/*}/jobs:searchForAlert:\x01*Z5"0/v4beta1/{parent=projects/*}/jobs:searchForAlert:\x01*\x12\xdd\x01\n\x0f\x42\x61tchCreateJobs\x12\x33.google.cloud.talent.v4beta1.BatchCreateJobsRequest\x1a\x1d.google.longrunning.Operation"v\x82\xd3\xe4\x93\x02p"7/v4beta1/{parent=projects/*/tenants/*}/jobs:batchCreate:\x01*Z2"-/v4beta1/{parent=projects/*}/jobs:batchCreate:\x01*\x12\xdd\x01\n\x0f\x42\x61tchUpdateJobs\x12\x33.google.cloud.talent.v4beta1.BatchUpdateJobsRequest\x1a\x1d.google.longrunning.Operation"v\x82\xd3\xe4\x93\x02p"7/v4beta1/{parent=projects/*/tenants/*}/jobs:batchUpdate:\x01*Z2"-/v4beta1/{parent=projects/*}/jobs:batchUpdate:\x01*\x1al\xca\x41\x13jobs.googleapis.
com\xd2\x41Shttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/jobsB}\n\x1f\x63om.google.cloud.talent.v4beta1B\x0fJobServiceProtoP\x01ZAgoogle.golang.org/genproto/googleapis/cloud/talent/v4beta1;talent\xa2\x02\x03\x43TSb\x06proto3' + '\n3google/cloud/talent_v4beta1/proto/job_service.proto\x12\x1bgoogle.cloud.talent.v4beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a.google/cloud/talent_v4beta1/proto/common.proto\x1a/google/cloud/talent_v4beta1/proto/filters.proto\x1a\x31google/cloud/talent_v4beta1/proto/histogram.proto\x1a+google/cloud/talent_v4beta1/proto/job.proto\x1a#google/longrunning/operations.proto\x1a\x19google/protobuf/any.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x17google/rpc/status.proto"[\n\x10\x43reateJobRequest\x12\x13\n\x06parent\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x32\n\x03job\x18\x02 \x01(\x0b\x32 .google.cloud.talent.v4beta1.JobB\x03\xe0\x41\x02""\n\rGetJobRequest\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02"w\n\x10UpdateJobRequest\x12\x32\n\x03job\x18\x01 \x01(\x0b\x32 .google.cloud.talent.v4beta1.JobB\x03\xe0\x41\x02\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"%\n\x10\x44\x65leteJobRequest\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02"B\n\x16\x42\x61tchDeleteJobsRequest\x12\x13\n\x06parent\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06\x66ilter\x18\x02 \x01(\tB\x03\xe0\x41\x02"\x9a\x01\n\x0fListJobsRequest\x12\x13\n\x06parent\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06\x66ilter\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x12\n\npage_token\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05\x12\x36\n\x08job_view\x18\x05 \x01(\x0e\x32$.google.cloud.talent.v4beta1.JobView"\x9c\x01\n\x10ListJobsResponse\x12.\n\x04jobs\x18\x01 \x03(\x0b\x32 .google.cloud.talent.v4beta1.Job\x12\x17\n\x0fnext_page_token\x18\x02 
\x01(\t\x12?\n\x08metadata\x18\x03 \x01(\x0b\x32-.google.cloud.talent.v4beta1.ResponseMetadata"\xb4\t\n\x11SearchJobsRequest\x12\x13\n\x06parent\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12N\n\x0bsearch_mode\x18\x02 \x01(\x0e\x32\x39.google.cloud.talent.v4beta1.SearchJobsRequest.SearchMode\x12K\n\x10request_metadata\x18\x03 \x01(\x0b\x32,.google.cloud.talent.v4beta1.RequestMetadataB\x03\xe0\x41\x02\x12\x38\n\tjob_query\x18\x04 \x01(\x0b\x32%.google.cloud.talent.v4beta1.JobQuery\x12\x19\n\x11\x65nable_broadening\x18\x05 \x01(\x08\x12#\n\x1brequire_precise_result_size\x18\x06 \x01(\x08\x12\x46\n\x11histogram_queries\x18\x07 \x03(\x0b\x32+.google.cloud.talent.v4beta1.HistogramQuery\x12\x36\n\x08job_view\x18\x08 \x01(\x0e\x32$.google.cloud.talent.v4beta1.JobView\x12\x0e\n\x06offset\x18\t \x01(\x05\x12\x11\n\tpage_size\x18\n \x01(\x05\x12\x12\n\npage_token\x18\x0b \x01(\t\x12\x10\n\x08order_by\x18\x0c \x01(\t\x12\x62\n\x15\x64iversification_level\x18\r \x01(\x0e\x32\x43.google.cloud.talent.v4beta1.SearchJobsRequest.DiversificationLevel\x12]\n\x13\x63ustom_ranking_info\x18\x0e \x01(\x0b\x32@.google.cloud.talent.v4beta1.SearchJobsRequest.CustomRankingInfo\x12\x1d\n\x15\x64isable_keyword_match\x18\x10 \x01(\x08\x1a\x9a\x02\n\x11\x43ustomRankingInfo\x12o\n\x10importance_level\x18\x01 \x01(\x0e\x32P.google.cloud.talent.v4beta1.SearchJobsRequest.CustomRankingInfo.ImportanceLevelB\x03\xe0\x41\x02\x12\x1f\n\x12ranking_expression\x18\x02 \x01(\tB\x03\xe0\x41\x02"s\n\x0fImportanceLevel\x12 
\n\x1cIMPORTANCE_LEVEL_UNSPECIFIED\x10\x00\x12\x08\n\x04NONE\x10\x01\x12\x07\n\x03LOW\x10\x02\x12\x08\n\x04MILD\x10\x03\x12\n\n\x06MEDIUM\x10\x04\x12\x08\n\x04HIGH\x10\x05\x12\x0b\n\x07\x45XTREME\x10\x06"R\n\nSearchMode\x12\x1b\n\x17SEARCH_MODE_UNSPECIFIED\x10\x00\x12\x0e\n\nJOB_SEARCH\x10\x01\x12\x17\n\x13\x46\x45\x41TURED_JOB_SEARCH\x10\x02"W\n\x14\x44iversificationLevel\x12%\n!DIVERSIFICATION_LEVEL_UNSPECIFIED\x10\x00\x12\x0c\n\x08\x44ISABLED\x10\x01\x12\n\n\x06SIMPLE\x10\x02"\xd7\x06\n\x12SearchJobsResponse\x12R\n\rmatching_jobs\x18\x01 \x03(\x0b\x32;.google.cloud.talent.v4beta1.SearchJobsResponse.MatchingJob\x12R\n\x17histogram_query_results\x18\x02 \x03(\x0b\x32\x31.google.cloud.talent.v4beta1.HistogramQueryResult\x12\x17\n\x0fnext_page_token\x18\x03 \x01(\t\x12?\n\x10location_filters\x18\x04 \x03(\x0b\x32%.google.cloud.talent.v4beta1.Location\x12\x1c\n\x14\x65stimated_total_size\x18\x05 \x01(\x05\x12\x12\n\ntotal_size\x18\x06 \x01(\x05\x12?\n\x08metadata\x18\x07 \x01(\x0b\x32-.google.cloud.talent.v4beta1.ResponseMetadata\x12"\n\x1a\x62roadened_query_jobs_count\x18\x08 \x01(\x05\x12I\n\x10spell_correction\x18\t \x01(\x0b\x32/.google.cloud.talent.v4beta1.SpellingCorrection\x1a\xdc\x01\n\x0bMatchingJob\x12-\n\x03job\x18\x01 \x01(\x0b\x32 .google.cloud.talent.v4beta1.Job\x12\x13\n\x0bjob_summary\x18\x02 \x01(\t\x12\x19\n\x11job_title_snippet\x18\x03 \x01(\t\x12\x1b\n\x13search_text_snippet\x18\x04 \x01(\t\x12Q\n\x0c\x63ommute_info\x18\x05 \x01(\x0b\x32;.google.cloud.talent.v4beta1.SearchJobsResponse.CommuteInfo\x1a~\n\x0b\x43ommuteInfo\x12;\n\x0cjob_location\x18\x01 \x01(\x0b\x32%.google.cloud.talent.v4beta1.Location\x12\x32\n\x0ftravel_duration\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration"b\n\x16\x42\x61tchCreateJobsRequest\x12\x13\n\x06parent\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x33\n\x04jobs\x18\x02 \x03(\x0b\x32 .google.cloud.talent.v4beta1.JobB\x03\xe0\x41\x02"\x8e\x01\n\x16\x42\x61tchUpdateJobsRequest\x12\x13\n\x06parent\x18\x01 
\x01(\tB\x03\xe0\x41\x02\x12.\n\x04jobs\x18\x02 \x03(\x0b\x32 .google.cloud.talent.v4beta1.Job\x12/\n\x0bupdate_mask\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"\xc4\x01\n\x12JobOperationResult\x12N\n\x0bjob_results\x18\x01 \x03(\x0b\x32\x39.google.cloud.talent.v4beta1.JobOperationResult.JobResult\x1a^\n\tJobResult\x12-\n\x03job\x18\x01 \x01(\x0b\x32 .google.cloud.talent.v4beta1.Job\x12"\n\x06status\x18\x02 \x01(\x0b\x32\x12.google.rpc.Status*v\n\x07JobView\x12\x18\n\x14JOB_VIEW_UNSPECIFIED\x10\x00\x12\x14\n\x10JOB_VIEW_ID_ONLY\x10\x01\x12\x14\n\x10JOB_VIEW_MINIMAL\x10\x02\x12\x12\n\x0eJOB_VIEW_SMALL\x10\x03\x12\x11\n\rJOB_VIEW_FULL\x10\x04\x32\x90\x11\n\nJobService\x12\xbc\x01\n\tCreateJob\x12-.google.cloud.talent.v4beta1.CreateJobRequest\x1a .google.cloud.talent.v4beta1.Job"^\x82\xd3\xe4\x93\x02X"+/v4beta1/{parent=projects/*/tenants/*}/jobs:\x01*Z&"!/v4beta1/{parent=projects/*}/jobs:\x01*\x12\xdd\x01\n\x0f\x42\x61tchCreateJobs\x12\x33.google.cloud.talent.v4beta1.BatchCreateJobsRequest\x1a\x1d.google.longrunning.Operation"v\x82\xd3\xe4\x93\x02p"7/v4beta1/{parent=projects/*/tenants/*}/jobs:batchCreate:\x01*Z2"-/v4beta1/{parent=projects/*}/jobs:batchCreate:\x01*\x12\xb0\x01\n\x06GetJob\x12*.google.cloud.talent.v4beta1.GetJobRequest\x1a .google.cloud.talent.v4beta1.Job"X\x82\xd3\xe4\x93\x02R\x12+/v4beta1/{name=projects/*/tenants/*/jobs/*}Z#\x12!/v4beta1/{name=projects/*/jobs/*}\x12\xc4\x01\n\tUpdateJob\x12-.google.cloud.talent.v4beta1.UpdateJobRequest\x1a 
.google.cloud.talent.v4beta1.Job"f\x82\xd3\xe4\x93\x02`2//v4beta1/{job.name=projects/*/tenants/*/jobs/*}:\x01*Z*2%/v4beta1/{job.name=projects/*/jobs/*}:\x01*\x12\xdd\x01\n\x0f\x42\x61tchUpdateJobs\x12\x33.google.cloud.talent.v4beta1.BatchUpdateJobsRequest\x1a\x1d.google.longrunning.Operation"v\x82\xd3\xe4\x93\x02p"7/v4beta1/{parent=projects/*/tenants/*}/jobs:batchUpdate:\x01*Z2"-/v4beta1/{parent=projects/*}/jobs:batchUpdate:\x01*\x12\xac\x01\n\tDeleteJob\x12-.google.cloud.talent.v4beta1.DeleteJobRequest\x1a\x16.google.protobuf.Empty"X\x82\xd3\xe4\x93\x02R*+/v4beta1/{name=projects/*/tenants/*/jobs/*}Z#*!/v4beta1/{name=projects/*/jobs/*}\x12\xd3\x01\n\x0f\x42\x61tchDeleteJobs\x12\x33.google.cloud.talent.v4beta1.BatchDeleteJobsRequest\x1a\x16.google.protobuf.Empty"s\x82\xd3\xe4\x93\x02m"7/v4beta1/{parent=projects/*/tenants/*}/jobs:batchDelete:\x01*Z/"-/v4beta1/{parent=projects/*}/jobs:batchDelete\x12\xc1\x01\n\x08ListJobs\x12,.google.cloud.talent.v4beta1.ListJobsRequest\x1a-.google.cloud.talent.v4beta1.ListJobsResponse"X\x82\xd3\xe4\x93\x02R\x12+/v4beta1/{parent=projects/*/tenants/*}/jobsZ#\x12!/v4beta1/{parent=projects/*}/jobs\x12\xdb\x01\n\nSearchJobs\x12..google.cloud.talent.v4beta1.SearchJobsRequest\x1a/.google.cloud.talent.v4beta1.SearchJobsResponse"l\x82\xd3\xe4\x93\x02\x66"2/v4beta1/{parent=projects/*/tenants/*}/jobs:search:\x01*Z-"(/v4beta1/{parent=projects/*}/jobs:search:\x01*\x12\xf3\x01\n\x12SearchJobsForAlert\x12..google.cloud.talent.v4beta1.SearchJobsRequest\x1a/.google.cloud.talent.v4beta1.SearchJobsResponse"|\x82\xd3\xe4\x93\x02v":/v4beta1/{parent=projects/*/tenants/*}/jobs:searchForAlert:\x01*Z5"0/v4beta1/{parent=projects/*}/jobs:searchForAlert:\x01*\x1al\xca\x41\x13jobs.googleapis.com\xd2\x41Shttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/jobsB}\n\x1f\x63om.google.cloud.talent.v4beta1B\x0fJobServiceProtoP\x01ZAgoogle.golang.org/genproto/googleapis/cloud/talent/v4beta1;talent\xa2\x02\x03\x43TSb\x06proto3' ), 
dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_common__pb2.DESCRIPTOR, google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_filters__pb2.DESCRIPTOR, google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_histogram__pb2.DESCRIPTOR, @@ -102,8 +104,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=3618, - serialized_end=3736, + serialized_start=3731, + serialized_end=3849, ) _sym_db.RegisterEnumDescriptor(_JOBVIEW) @@ -149,8 +151,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=2041, - serialized_end=2156, + serialized_start=2139, + serialized_end=2254, ) _sym_db.RegisterEnumDescriptor(_SEARCHJOBSREQUEST_CUSTOMRANKINGINFO_IMPORTANCELEVEL) @@ -180,8 +182,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=2158, - serialized_end=2240, + serialized_start=2256, + serialized_end=2338, ) _sym_db.RegisterEnumDescriptor(_SEARCHJOBSREQUEST_SEARCHMODE) @@ -207,8 +209,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=2242, - serialized_end=2329, + serialized_start=2340, + serialized_end=2427, ) _sym_db.RegisterEnumDescriptor(_SEARCHJOBSREQUEST_DIVERSIFICATIONLEVEL) @@ -235,7 +237,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -253,7 +255,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -265,8 +267,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=516, - serialized_end=597, + serialized_start=549, + serialized_end=640, ) @@ -292,7 +294,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + 
serialized_options=_b("\340A\002"), file=DESCRIPTOR, ) ], @@ -304,8 +306,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=599, - serialized_end=628, + serialized_start=642, + serialized_end=676, ) @@ -331,7 +333,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -361,8 +363,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=630, - serialized_end=744, + serialized_start=678, + serialized_end=797, ) @@ -388,7 +390,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ) ], @@ -400,8 +402,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=746, - serialized_end=778, + serialized_start=799, + serialized_end=836, ) @@ -427,7 +429,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -445,7 +447,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -457,8 +459,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=780, - serialized_end=836, + serialized_start=838, + serialized_end=904, ) @@ -484,7 +486,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -502,7 +504,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -568,8 +570,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=839, - serialized_end=983, + serialized_start=907, + 
serialized_end=1061, ) @@ -643,8 +645,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=986, - serialized_end=1142, + serialized_start=1064, + serialized_end=1220, ) @@ -670,7 +672,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -688,7 +690,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -700,8 +702,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1884, - serialized_end=2156, + serialized_start=1972, + serialized_end=2254, ) _SEARCHJOBSREQUEST = _descriptor.Descriptor( @@ -726,7 +728,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -762,7 +764,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -990,8 +992,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1145, - serialized_end=2329, + serialized_start=1223, + serialized_end=2427, ) @@ -1101,8 +1103,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2839, - serialized_end=3059, + serialized_start=2937, + serialized_end=3157, ) _SEARCHJOBSRESPONSE_COMMUTEINFO = _descriptor.Descriptor( @@ -1157,8 +1159,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3061, - serialized_end=3187, + serialized_start=3159, + serialized_end=3285, ) _SEARCHJOBSRESPONSE = _descriptor.Descriptor( @@ -1339,8 +1341,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2332, - serialized_end=3187, + serialized_start=2430, + serialized_end=3285, ) @@ -1366,7 +1368,7 @@ containing_type=None, 
is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1384,7 +1386,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -1396,8 +1398,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3189, - serialized_end=3277, + serialized_start=3287, + serialized_end=3385, ) @@ -1423,7 +1425,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1471,8 +1473,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3280, - serialized_end=3417, + serialized_start=3388, + serialized_end=3530, ) @@ -1528,8 +1530,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3522, - serialized_end=3616, + serialized_start=3635, + serialized_end=3729, ) _JOBOPERATIONRESULT = _descriptor.Descriptor( @@ -1566,8 +1568,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3420, - serialized_end=3616, + serialized_start=3533, + serialized_end=3729, ) _CREATEJOBREQUEST.fields_by_name[ @@ -1701,19 +1703,16 @@ dict( DESCRIPTOR=_CREATEJOBREQUEST, __module__="google.cloud.talent_v4beta1.proto.job_service_pb2", - __doc__="""Input only. - - Create job request. + __doc__="""Create job request. Attributes: parent: Required. The resource name of the tenant under which the job is created. The format is - "projects/{project\_id}/tenants/{tenant\_id}", for example, - "projects/api-test-project/tenant/foo". Tenant id is optional - and a default tenant is created if unspecified, for example, - "projects/api-test-project". + "projects/{project\_id}/tenants/{tenant\_id}". For example, + "projects/foo/tenant/bar". If tenant id is unspecified a + default tenant is created. For example, "projects/foo". 
job: Required. The Job to be created. """, @@ -1728,20 +1727,17 @@ dict( DESCRIPTOR=_GETJOBREQUEST, __module__="google.cloud.talent_v4beta1.proto.job_service_pb2", - __doc__="""Input only. - - Get job request. + __doc__="""Get job request. Attributes: name: Required. The resource name of the job to retrieve. The format is - "projects/{project\_id}/tenants/{tenant\_id}/jobs/{job\_id}", - for example, "projects/api-test- - project/tenants/foo/jobs/1234". Tenant id is optional and the - default tenant is used if unspecified, for example, - "projects/api-test-project/jobs/1234". + "projects/{project\_id}/tenants/{tenant\_id}/jobs/{job\_id}". + For example, "projects/foo/tenants/bar/jobs/baz". If tenant + id is unspecified, the default tenant is used. For example, + "projects/foo/jobs/bar". """, # @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.GetJobRequest) ), @@ -1754,19 +1750,16 @@ dict( DESCRIPTOR=_UPDATEJOBREQUEST, __module__="google.cloud.talent_v4beta1.proto.job_service_pb2", - __doc__="""Input only. - - Update job request. + __doc__="""Update job request. Attributes: job: Required. The Job to be updated. update_mask: - Optional but strongly recommended to be provided for the best - service experience. If [update\_mask][google.cloud.talent.v4b - eta1.UpdateJobRequest.update\_mask] is provided, only the - specified fields in + Strongly recommended for the best service experience. If [upd + ate\_mask][google.cloud.talent.v4beta1.UpdateJobRequest.update + \_mask] is provided, only the specified fields in [job][google.cloud.talent.v4beta1.UpdateJobRequest.job] are updated. Otherwise all the fields are updated. A field mask to restrict the fields that are updated. Only top level fields @@ -1783,20 +1776,17 @@ dict( DESCRIPTOR=_DELETEJOBREQUEST, __module__="google.cloud.talent_v4beta1.proto.job_service_pb2", - __doc__="""Input only. - - Delete job request. + __doc__="""Delete job request. Attributes: name: Required. 
The resource name of the job to be deleted. The format is - "projects/{project\_id}/tenants/{tenant\_id}/jobs/{job\_id}", - for example, "projects/api-test- - project/tenants/foo/jobs/1234". Tenant id is optional and the - default tenant is used if unspecified, for example, - "projects/api-test-project/jobs/1234". + "projects/{project\_id}/tenants/{tenant\_id}/jobs/{job\_id}". + For example, "projects/foo/tenants/bar/jobs/baz". If tenant + id is unspecified, the default tenant is used. For example, + "projects/foo/jobs/bar". """, # @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.DeleteJobRequest) ), @@ -1809,25 +1799,22 @@ dict( DESCRIPTOR=_BATCHDELETEJOBSREQUEST, __module__="google.cloud.talent_v4beta1.proto.job_service_pb2", - __doc__="""Input only. - - Batch delete jobs request. + __doc__="""Batch delete jobs request. Attributes: parent: Required. The resource name of the tenant under which the job is created. The format is - "projects/{project\_id}/tenants/{tenant\_id}", for example, - "projects/api-test-project/tenant/foo". Tenant id is optional - and the default tenant is used if unspecified, for example, - "projects/api-test-project". + "projects/{project\_id}/tenants/{tenant\_id}". For example, + "projects/foo/tenant/bar". If tenant id is unspecified, a + default tenant is created. For example, "projects/foo". filter: Required. The filter string specifies the jobs to be deleted. 
Supported operator: =, AND The fields eligible for filtering are: - ``companyName`` (Required) - ``requisitionId`` - (Required) Sample Query: companyName = "projects/api-test- - project/companies/123" AND requisitionId = "req-1" + (Required) Sample Query: companyName = + "projects/foo/companies/bar" AND requisitionId = "req-1" """, # @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.BatchDeleteJobsRequest) ), @@ -1840,45 +1827,42 @@ dict( DESCRIPTOR=_LISTJOBSREQUEST, __module__="google.cloud.talent_v4beta1.proto.job_service_pb2", - __doc__="""Input only. - - List jobs request. + __doc__="""List jobs request. Attributes: parent: Required. The resource name of the tenant under which the job is created. The format is - "projects/{project\_id}/tenants/{tenant\_id}", for example, - "projects/api-test-project/tenant/foo". Tenant id is optional - and the default tenant is used if unspecified, for example, - "projects/api-test-project". + "projects/{project\_id}/tenants/{tenant\_id}". For example, + "projects/foo/tenant/bar". If tenant id is unspecified, a + default tenant is created. For example, "projects/foo". filter: Required. The filter string specifies the jobs to be enumerated. Supported operator: =, AND The fields eligible for filtering are: - ``companyName`` (Required) - - ``requisitionId`` (Optional) - ``status`` (Optional) - Available values: OPEN, EXPIRED, ALL. Defaults to OPEN if - no value is specified. Sample Query: - companyName = - "projects/api-test-project/tenants/foo/companies/bar" - - companyName = "projects/api-test- - project/tenants/foo/companies/bar" AND requisitionId = - "req-1" - companyName = "projects/api-test- - project/tenants/foo/companies/bar" AND status = "EXPIRED" + ``requisitionId`` - ``status`` Available values: OPEN, + EXPIRED, ALL. Defaults to OPEN if no value is specified. 
+ Sample Query: - companyName = + "projects/foo/tenants/bar/companies/baz" - companyName = + "projects/foo/tenants/bar/companies/baz" AND requisitionId + = "req-1" - companyName = + "projects/foo/tenants/bar/companies/baz" AND status = + "EXPIRED" page_token: - Optional. The starting point of a query result. + The starting point of a query result. page_size: - Optional. The maximum number of jobs to be returned per page - of results. If [job\_view][google.cloud.talent.v4beta1.ListJo - bsRequest.job\_view] is set to [JobView.JOB\_VIEW\_ID\_ONLY][g - oogle.cloud.talent.v4beta1.JobView.JOB\_VIEW\_ID\_ONLY], the - maximum allowed page size is 1000. Otherwise, the maximum - allowed page size is 100. Default is 100 if empty or a number - < 1 is specified. + The maximum number of jobs to be returned per page of results. + If [job\_view][google.cloud.talent.v4beta1.ListJobsRequest.job + \_view] is set to [JobView.JOB\_VIEW\_ID\_ONLY][google.cloud.t + alent.v4beta1.JobView.JOB\_VIEW\_ID\_ONLY], the maximum + allowed page size is 1000. Otherwise, the maximum allowed page + size is 100. Default is 100 if empty or a number < 1 is + specified. job_view: - Optional. The desired job attributes returned for jobs in the - search response. Defaults to [JobView.JOB\_VIEW\_FULL][google. - cloud.talent.v4beta1.JobView.JOB\_VIEW\_FULL] if no value is + The desired job attributes returned for jobs in the search + response. Defaults to [JobView.JOB\_VIEW\_FULL][google.cloud.t + alent.v4beta1.JobView.JOB\_VIEW\_FULL] if no value is specified. """, # @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.ListJobsRequest) @@ -1892,9 +1876,7 @@ dict( DESCRIPTOR=_LISTJOBSRESPONSE, __module__="google.cloud.talent_v4beta1.proto.job_service_pb2", - __doc__="""Output only. - - List jobs response. + __doc__="""List jobs response. 
Attributes: @@ -1922,9 +1904,7 @@ dict( DESCRIPTOR=_SEARCHJOBSREQUEST_CUSTOMRANKINGINFO, __module__="google.cloud.talent_v4beta1.proto.job_service_pb2", - __doc__="""Input only. - - Custom ranking information for + __doc__="""Custom ranking information for [SearchJobsRequest][google.cloud.talent.v4beta1.SearchJobsRequest]. @@ -1955,60 +1935,57 @@ ), DESCRIPTOR=_SEARCHJOBSREQUEST, __module__="google.cloud.talent_v4beta1.proto.job_service_pb2", - __doc__="""Input only. - - The Request body of the ``SearchJobs`` call. + __doc__="""The Request body of the ``SearchJobs`` call. Attributes: parent: Required. The resource name of the tenant to search within. - The format is "projects/{project\_id}/tenants/{tenant\_id}", - for example, "projects/api-test-project/tenant/foo". Tenant - id is optional and the default tenant is used if unspecified, - for example, "projects/api-test-project". + The format is "projects/{project\_id}/tenants/{tenant\_id}". + For example, "projects/foo/tenant/bar". If tenant id is + unspecified, a default tenant is created. For example, + "projects/foo". search_mode: - Optional. Mode of a search. Defaults to [SearchMode.JOB\_SEAR - CH][google.cloud.talent.v4beta1.SearchJobsRequest.SearchMode.J - OB\_SEARCH]. + Mode of a search. Defaults to [SearchMode.JOB\_SEARCH][google + .cloud.talent.v4beta1.SearchJobsRequest.SearchMode.JOB\_SEARCH + ]. request_metadata: Required. The meta information collected about the job searcher, used to improve the search quality of the service. The identifiers (such as ``user_id``) are provided by users, and must be unique and consistent. job_query: - Optional. Query used to search against jobs, such as keyword, - location filters, etc. + Query used to search against jobs, such as keyword, location + filters, etc. enable_broadening: - Optional. Controls whether to broaden the search when it - produces sparse results. Broadened queries append results to - the end of the matching results list. Defaults to false. 
+ Controls whether to broaden the search when it produces sparse + results. Broadened queries append results to the end of the + matching results list. Defaults to false. require_precise_result_size: - Optional. Controls if the search job request requires the - return of a precise count of the first 300 results. Setting - this to ``true`` ensures consistency in the number of results - per page. Best practice is to set this value to true if a - client allows users to jump directly to a non-sequential - search results page. Enabling this flag may adversely impact + Controls if the search job request requires the return of a + precise count of the first 300 results. Setting this to + ``true`` ensures consistency in the number of results per + page. Best practice is to set this value to true if a client + allows users to jump directly to a non-sequential search + results page. Enabling this flag may adversely impact performance. Defaults to false. histogram_queries: - Optional. An expression specifies a histogram request against - matching jobs. Expression syntax is an aggregation function - call with histogram facets and other options. Available - aggregation function calls are: \* - ``count(string_histogram_facet)``: Count the number of - matching entities, for each distinct attribute value. \* - ``count(numeric_histogram_facet, list of buckets)``: Count the - number of matching entities within each bucket. Data types: - - Histogram facet: facet names with format - [a-zA-Z][a-zA-Z0-9\_]+. - String: string like "any string - with backslash escape for quote(")." - Number: whole number - and floating point number like 10, -1 and -0.01. - List: list - of elements with comma(,) separator surrounded by square - brackets, for example, [1, 2, 3] and ["one", "two", "three"]. 
- Built-in constants: - MIN (minimum number similar to java - Double.MIN\_VALUE) - MAX (maximum number similar to java - Double.MAX\_VALUE) Built-in functions: - bucket(start, + An expression specifies a histogram request against matching + jobs. Expression syntax is an aggregation function call with + histogram facets and other options. Available aggregation + function calls are: \* ``count(string_histogram_facet)``: + Count the number of matching entities, for each distinct + attribute value. \* ``count(numeric_histogram_facet, list of + buckets)``: Count the number of matching entities within each + bucket. Data types: - Histogram facet: facet names with + format [a-zA-Z][a-zA-Z0-9\_]+. - String: string like "any + string with backslash escape for quote(")." - Number: whole + number and floating point number like 10, -1 and -0.01. - + List: list of elements with comma(,) separator surrounded by + square brackets, for example, [1, 2, 3] and ["one", "two", + "three"]. Built-in constants: - MIN (minimum number similar + to java Double.MIN\_VALUE) - MAX (maximum number similar to + java Double.MAX\_VALUE) Built-in functions: - bucket(start, end[, label]): bucket built-in function creates a bucket with range of [start, end). Note that the end is exclusive, for example, bucket(1, MAX, "positive number") or bucket(1, @@ -2080,37 +2057,36 @@ numeric-custom-attribute"], [bucket(MIN, 0, "negative"), bucket(0, MAX, "non-negative"])`` job_view: - Optional. The desired job attributes returned for jobs in the - search response. Defaults to [JobView.JOB\_VIEW\_SMALL][google - .cloud.talent.v4beta1.JobView.JOB\_VIEW\_SMALL] if no value is + The desired job attributes returned for jobs in the search + response. Defaults to [JobView.JOB\_VIEW\_SMALL][google.cloud. + talent.v4beta1.JobView.JOB\_VIEW\_SMALL] if no value is specified. offset: - Optional. 
An integer that specifies the current offset (that - is, starting result location, amongst the jobs deemed by the - API as relevant) in search results. This field is only - considered if [page\_token][google.cloud.talent.v4beta1.Search - JobsRequest.page\_token] is unset. For example, 0 means to - return results starting from the first matching job, and 10 - means to return from the 11th job. This can be used for - pagination, (for example, pageSize = 10 and offset = 10 means - to return from the second page). + An integer that specifies the current offset (that is, + starting result location, amongst the jobs deemed by the API + as relevant) in search results. This field is only considered + if [page\_token][google.cloud.talent.v4beta1.SearchJobsRequest + .page\_token] is unset. For example, 0 means to return + results starting from the first matching job, and 10 means to + return from the 11th job. This can be used for pagination, + (for example, pageSize = 10 and offset = 10 means to return + from the second page). page_size: - Optional. A limit on the number of jobs returned in the search - results. Increasing this value above the default value of 10 - can increase search response time. The value can be between 1 - and 100. + A limit on the number of jobs returned in the search results. + Increasing this value above the default value of 10 can + increase search response time. The value can be between 1 and + 100. page_token: - Optional. The token specifying the current offset within - search results. See [SearchJobsResponse.next\_page\_token][goo - gle.cloud.talent.v4beta1.SearchJobsResponse.next\_page\_token] - for an explanation of how to obtain the next set of query - results. + The token specifying the current offset within search results. + See [SearchJobsResponse.next\_page\_token][google.cloud.talent + .v4beta1.SearchJobsResponse.next\_page\_token] for an + explanation of how to obtain the next set of query results. order_by: - Optional. 
The criteria determining how search results are - sorted. Default is ``"relevance desc"``. Supported options - are: - ``"relevance desc"``: By relevance descending, as - determined by the API algorithms. Relevance thresholding of - query results is only available with this ordering. - + The criteria determining how search results are sorted. + Default is ``"relevance desc"``. Supported options are: - + ``"relevance desc"``: By relevance descending, as determined + by the API algorithms. Relevance thresholding of query + results is only available with this ordering. - ``"posting_publish_time desc"``: By [Job.posting\_publish\_ time][google.cloud.talent.v4beta1.Job.posting\_publish\_time] descending. - ``"posting_update_time desc"``: By [Job.post @@ -2165,20 +2141,20 @@ [diversification\_level][google.cloud.talent.v4beta1.SearchJob sRequest.diversification\_level]. diversification_level: - Optional. Controls whether highly similar jobs are returned - next to each other in the search results. Jobs are identified - as highly similar based on their titles, job categories, and - locations. Highly similar results are clustered so that only - one representative job of the cluster is displayed to the job + Controls whether highly similar jobs are returned next to each + other in the search results. Jobs are identified as highly + similar based on their titles, job categories, and locations. + Highly similar results are clustered so that only one + representative job of the cluster is displayed to the job seeker higher up in the results, with the other jobs being displayed lower down in the results. Defaults to [Diversifica tionLevel.SIMPLE][google.cloud.talent.v4beta1.SearchJobsReques t.DiversificationLevel.SIMPLE] if no value is specified. custom_ranking_info: - Optional. Controls over how job documents get ranked on top of - existing relevance score (determined by API algorithm). 
+ Controls over how job documents get ranked on top of existing + relevance score (determined by API algorithm). disable_keyword_match: - Optional. Controls whether to disable exact keyword match on + Controls whether to disable exact keyword match on [Job.title][google.cloud.talent.v4beta1.Job.title], [Job.descr iption][google.cloud.talent.v4beta1.Job.description], [Job.com pany\_display\_name][google.cloud.talent.v4beta1.Job.company\_ @@ -2217,9 +2193,7 @@ dict( DESCRIPTOR=_SEARCHJOBSRESPONSE_MATCHINGJOB, __module__="google.cloud.talent_v4beta1.proto.job_service_pb2", - __doc__="""Output only. - - Job entry with metadata inside + __doc__="""Job entry with metadata inside [SearchJobsResponse][google.cloud.talent.v4beta1.SearchJobsResponse]. @@ -2255,9 +2229,7 @@ dict( DESCRIPTOR=_SEARCHJOBSRESPONSE_COMMUTEINFO, __module__="google.cloud.talent_v4beta1.proto.job_service_pb2", - __doc__="""Output only. - - Commute details related to this job. + __doc__="""Commute details related to this job. Attributes: @@ -2274,9 +2246,7 @@ ), DESCRIPTOR=_SEARCHJOBSRESPONSE, __module__="google.cloud.talent_v4beta1.proto.job_service_pb2", - __doc__="""Output only. - - Response for SearchJob method. + __doc__="""Response for SearchJob method. Attributes: @@ -2346,10 +2316,9 @@ parent: Required. The resource name of the tenant under which the job is created. The format is - "projects/{project\_id}/tenants/{tenant\_id}", for example, - "projects/api-test-project/tenant/foo". Tenant id is optional - and a default tenant is created if unspecified, for example, - "projects/api-test-project". + "projects/{project\_id}/tenants/{tenant\_id}". For example, + "projects/foo/tenant/bar". If tenant id is unspecified, a + default tenant is created. For example, "projects/foo". jobs: Required. The jobs to be created. """, @@ -2371,23 +2340,22 @@ parent: Required. The resource name of the tenant under which the job is created. 
The format is - "projects/{project\_id}/tenants/{tenant\_id}", for example, - "projects/api-test-project/tenant/foo". Tenant id is optional - and the default tenant is used if unspecified, for example, - "projects/api-test-project". + "projects/{project\_id}/tenants/{tenant\_id}". For example, + "projects/foo/tenant/bar". If tenant id is unspecified, a + default tenant is created. For example, "projects/foo". jobs: - Required. The jobs to be updated. + The jobs to be updated. update_mask: - Optional but strongly recommended to be provided for the best - service experience, also increase latency when checking status - of batch operation. If [update\_mask][google.cloud.talent.v4b - eta1.BatchUpdateJobsRequest.update\_mask] is provided, only - the specified fields in [Job][google.cloud.talent.v4beta1.Job] - are updated. Otherwise all the fields are updated. A field - mask to restrict the fields that are updated. Only top level - fields of [Job][google.cloud.talent.v4beta1.Job] are - supported. If [update\_mask][google.cloud.talent.v4beta1.Batc - hUpdateJobsRequest.update\_mask] is provided, The + Strongly recommended for the best service experience. Be aware + that it will also increase latency when checking the status of + a batch operation. If [update\_mask][google.cloud.talent.v4be + ta1.BatchUpdateJobsRequest.update\_mask] is provided, only the + specified fields in [Job][google.cloud.talent.v4beta1.Job] are + updated. Otherwise all the fields are updated. A field mask + to restrict the fields that are updated. Only top level fields + of [Job][google.cloud.talent.v4beta1.Job] are supported. If [ + update\_mask][google.cloud.talent.v4beta1.BatchUpdateJobsReque + st.update\_mask] is provided, The [Job][google.cloud.talent.v4beta1.Job] inside [JobResult][goog le.cloud.talent.v4beta1.JobOperationResult.JobResult] will only contains fields that is updated, plus the Id of the Job. 
@@ -2432,9 +2400,7 @@ ), DESCRIPTOR=_JOBOPERATIONRESULT, __module__="google.cloud.talent_v4beta1.proto.job_service_pb2", - __doc__="""Output only. - - The result of + __doc__="""The result of [JobService.BatchCreateJobs][google.cloud.talent.v4beta1.JobService.BatchCreateJobs] or [JobService.BatchUpdateJobs][google.cloud.talent.v4beta1.JobService.BatchUpdateJobs] @@ -2457,6 +2423,24 @@ DESCRIPTOR._options = None +_CREATEJOBREQUEST.fields_by_name["parent"]._options = None +_CREATEJOBREQUEST.fields_by_name["job"]._options = None +_GETJOBREQUEST.fields_by_name["name"]._options = None +_UPDATEJOBREQUEST.fields_by_name["job"]._options = None +_DELETEJOBREQUEST.fields_by_name["name"]._options = None +_BATCHDELETEJOBSREQUEST.fields_by_name["parent"]._options = None +_BATCHDELETEJOBSREQUEST.fields_by_name["filter"]._options = None +_LISTJOBSREQUEST.fields_by_name["parent"]._options = None +_LISTJOBSREQUEST.fields_by_name["filter"]._options = None +_SEARCHJOBSREQUEST_CUSTOMRANKINGINFO.fields_by_name["importance_level"]._options = None +_SEARCHJOBSREQUEST_CUSTOMRANKINGINFO.fields_by_name[ + "ranking_expression" +]._options = None +_SEARCHJOBSREQUEST.fields_by_name["parent"]._options = None +_SEARCHJOBSREQUEST.fields_by_name["request_metadata"]._options = None +_BATCHCREATEJOBSREQUEST.fields_by_name["parent"]._options = None +_BATCHCREATEJOBSREQUEST.fields_by_name["jobs"]._options = None +_BATCHUPDATEJOBSREQUEST.fields_by_name["parent"]._options = None _JOBSERVICE = _descriptor.ServiceDescriptor( name="JobService", @@ -2466,8 +2450,8 @@ serialized_options=_b( "\312A\023jobs.googleapis.com\322AShttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/jobs" ), - serialized_start=3739, - serialized_end=5931, + serialized_start=3852, + serialized_end=6044, methods=[ _descriptor.MethodDescriptor( name="CreateJob", @@ -2480,10 +2464,21 @@ '\202\323\344\223\002X"+/v4beta1/{parent=projects/*/tenants/*}/jobs:\001*Z&"!/v4beta1/{parent=projects/*}/jobs:\001*' ), ), 
+ _descriptor.MethodDescriptor( + name="BatchCreateJobs", + full_name="google.cloud.talent.v4beta1.JobService.BatchCreateJobs", + index=1, + containing_service=None, + input_type=_BATCHCREATEJOBSREQUEST, + output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, + serialized_options=_b( + '\202\323\344\223\002p"7/v4beta1/{parent=projects/*/tenants/*}/jobs:batchCreate:\001*Z2"-/v4beta1/{parent=projects/*}/jobs:batchCreate:\001*' + ), + ), _descriptor.MethodDescriptor( name="GetJob", full_name="google.cloud.talent.v4beta1.JobService.GetJob", - index=1, + index=2, containing_service=None, input_type=_GETJOBREQUEST, output_type=google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_job__pb2._JOB, @@ -2494,7 +2489,7 @@ _descriptor.MethodDescriptor( name="UpdateJob", full_name="google.cloud.talent.v4beta1.JobService.UpdateJob", - index=2, + index=3, containing_service=None, input_type=_UPDATEJOBREQUEST, output_type=google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_job__pb2._JOB, @@ -2503,31 +2498,31 @@ ), ), _descriptor.MethodDescriptor( - name="DeleteJob", - full_name="google.cloud.talent.v4beta1.JobService.DeleteJob", - index=3, + name="BatchUpdateJobs", + full_name="google.cloud.talent.v4beta1.JobService.BatchUpdateJobs", + index=4, containing_service=None, - input_type=_DELETEJOBREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, + input_type=_BATCHUPDATEJOBSREQUEST, + output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, serialized_options=_b( - "\202\323\344\223\002R*+/v4beta1/{name=projects/*/tenants/*/jobs/*}Z#*!/v4beta1/{name=projects/*/jobs/*}" + '\202\323\344\223\002p"7/v4beta1/{parent=projects/*/tenants/*}/jobs:batchUpdate:\001*Z2"-/v4beta1/{parent=projects/*}/jobs:batchUpdate:\001*' ), ), _descriptor.MethodDescriptor( - name="ListJobs", - full_name="google.cloud.talent.v4beta1.JobService.ListJobs", - index=4, + name="DeleteJob", + full_name="google.cloud.talent.v4beta1.JobService.DeleteJob", + index=5, 
containing_service=None, - input_type=_LISTJOBSREQUEST, - output_type=_LISTJOBSRESPONSE, + input_type=_DELETEJOBREQUEST, + output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, serialized_options=_b( - "\202\323\344\223\002R\022+/v4beta1/{parent=projects/*/tenants/*}/jobsZ#\022!/v4beta1/{parent=projects/*}/jobs" + "\202\323\344\223\002R*+/v4beta1/{name=projects/*/tenants/*/jobs/*}Z#*!/v4beta1/{name=projects/*/jobs/*}" ), ), _descriptor.MethodDescriptor( name="BatchDeleteJobs", full_name="google.cloud.talent.v4beta1.JobService.BatchDeleteJobs", - index=5, + index=6, containing_service=None, input_type=_BATCHDELETEJOBSREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, @@ -2535,10 +2530,21 @@ '\202\323\344\223\002m"7/v4beta1/{parent=projects/*/tenants/*}/jobs:batchDelete:\001*Z/"-/v4beta1/{parent=projects/*}/jobs:batchDelete' ), ), + _descriptor.MethodDescriptor( + name="ListJobs", + full_name="google.cloud.talent.v4beta1.JobService.ListJobs", + index=7, + containing_service=None, + input_type=_LISTJOBSREQUEST, + output_type=_LISTJOBSRESPONSE, + serialized_options=_b( + "\202\323\344\223\002R\022+/v4beta1/{parent=projects/*/tenants/*}/jobsZ#\022!/v4beta1/{parent=projects/*}/jobs" + ), + ), _descriptor.MethodDescriptor( name="SearchJobs", full_name="google.cloud.talent.v4beta1.JobService.SearchJobs", - index=6, + index=8, containing_service=None, input_type=_SEARCHJOBSREQUEST, output_type=_SEARCHJOBSRESPONSE, @@ -2549,7 +2555,7 @@ _descriptor.MethodDescriptor( name="SearchJobsForAlert", full_name="google.cloud.talent.v4beta1.JobService.SearchJobsForAlert", - index=7, + index=9, containing_service=None, input_type=_SEARCHJOBSREQUEST, output_type=_SEARCHJOBSRESPONSE, @@ -2557,28 +2563,6 @@ '\202\323\344\223\002v":/v4beta1/{parent=projects/*/tenants/*}/jobs:searchForAlert:\001*Z5"0/v4beta1/{parent=projects/*}/jobs:searchForAlert:\001*' ), ), - _descriptor.MethodDescriptor( - name="BatchCreateJobs", - 
full_name="google.cloud.talent.v4beta1.JobService.BatchCreateJobs", - index=8, - containing_service=None, - input_type=_BATCHCREATEJOBSREQUEST, - output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, - serialized_options=_b( - '\202\323\344\223\002p"7/v4beta1/{parent=projects/*/tenants/*}/jobs:batchCreate:\001*Z2"-/v4beta1/{parent=projects/*}/jobs:batchCreate:\001*' - ), - ), - _descriptor.MethodDescriptor( - name="BatchUpdateJobs", - full_name="google.cloud.talent.v4beta1.JobService.BatchUpdateJobs", - index=9, - containing_service=None, - input_type=_BATCHUPDATEJOBSREQUEST, - output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, - serialized_options=_b( - '\202\323\344\223\002p"7/v4beta1/{parent=projects/*/tenants/*}/jobs:batchUpdate:\001*Z2"-/v4beta1/{parent=projects/*}/jobs:batchUpdate:\001*' - ), - ), ], ) _sym_db.RegisterServiceDescriptor(_JOBSERVICE) diff --git a/talent/google/cloud/talent_v4beta1/proto/job_service_pb2_grpc.py b/talent/google/cloud/talent_v4beta1/proto/job_service_pb2_grpc.py index a3b63f49e71e..54159d1f1fce 100644 --- a/talent/google/cloud/talent_v4beta1/proto/job_service_pb2_grpc.py +++ b/talent/google/cloud/talent_v4beta1/proto/job_service_pb2_grpc.py @@ -28,6 +28,11 @@ def __init__(self, channel): request_serializer=google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_job__service__pb2.CreateJobRequest.SerializeToString, response_deserializer=google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_job__pb2.Job.FromString, ) + self.BatchCreateJobs = channel.unary_unary( + "/google.cloud.talent.v4beta1.JobService/BatchCreateJobs", + request_serializer=google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_job__service__pb2.BatchCreateJobsRequest.SerializeToString, + response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, + ) self.GetJob = channel.unary_unary( "/google.cloud.talent.v4beta1.JobService/GetJob", 
request_serializer=google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_job__service__pb2.GetJobRequest.SerializeToString, @@ -38,21 +43,26 @@ def __init__(self, channel): request_serializer=google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_job__service__pb2.UpdateJobRequest.SerializeToString, response_deserializer=google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_job__pb2.Job.FromString, ) + self.BatchUpdateJobs = channel.unary_unary( + "/google.cloud.talent.v4beta1.JobService/BatchUpdateJobs", + request_serializer=google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_job__service__pb2.BatchUpdateJobsRequest.SerializeToString, + response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, + ) self.DeleteJob = channel.unary_unary( "/google.cloud.talent.v4beta1.JobService/DeleteJob", request_serializer=google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_job__service__pb2.DeleteJobRequest.SerializeToString, response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, ) - self.ListJobs = channel.unary_unary( - "/google.cloud.talent.v4beta1.JobService/ListJobs", - request_serializer=google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_job__service__pb2.ListJobsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_job__service__pb2.ListJobsResponse.FromString, - ) self.BatchDeleteJobs = channel.unary_unary( "/google.cloud.talent.v4beta1.JobService/BatchDeleteJobs", request_serializer=google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_job__service__pb2.BatchDeleteJobsRequest.SerializeToString, response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, ) + self.ListJobs = channel.unary_unary( + "/google.cloud.talent.v4beta1.JobService/ListJobs", + request_serializer=google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_job__service__pb2.ListJobsRequest.SerializeToString, + 
response_deserializer=google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_job__service__pb2.ListJobsResponse.FromString, + ) self.SearchJobs = channel.unary_unary( "/google.cloud.talent.v4beta1.JobService/SearchJobs", request_serializer=google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_job__service__pb2.SearchJobsRequest.SerializeToString, @@ -63,16 +73,6 @@ def __init__(self, channel): request_serializer=google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_job__service__pb2.SearchJobsRequest.SerializeToString, response_deserializer=google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_job__service__pb2.SearchJobsResponse.FromString, ) - self.BatchCreateJobs = channel.unary_unary( - "/google.cloud.talent.v4beta1.JobService/BatchCreateJobs", - request_serializer=google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_job__service__pb2.BatchCreateJobsRequest.SerializeToString, - response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, - ) - self.BatchUpdateJobs = channel.unary_unary( - "/google.cloud.talent.v4beta1.JobService/BatchUpdateJobs", - request_serializer=google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_job__service__pb2.BatchUpdateJobsRequest.SerializeToString, - response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, - ) class JobServiceServicer(object): @@ -89,6 +89,13 @@ def CreateJob(self, request, context): context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") + def BatchCreateJobs(self, request, context): + """Begins executing a batch create jobs operation. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + def GetJob(self, request, context): """Retrieves the specified job, whose status is OPEN or recently EXPIRED within the last 90 days. 
@@ -107,6 +114,13 @@ def UpdateJob(self, request, context): context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") + def BatchUpdateJobs(self, request, context): + """Begins executing a batch update jobs operation. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + def DeleteJob(self, request, context): """Deletes the specified job. @@ -117,15 +131,15 @@ def DeleteJob(self, request, context): context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") - def ListJobs(self, request, context): - """Lists jobs by filter. + def BatchDeleteJobs(self, request, context): + """Deletes a list of [Job][google.cloud.talent.v4beta1.Job]s by filter. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") - def BatchDeleteJobs(self, request, context): - """Deletes a list of [Job][google.cloud.talent.v4beta1.Job]s by filter. + def ListJobs(self, request, context): + """Lists jobs by filter. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") @@ -162,20 +176,6 @@ def SearchJobsForAlert(self, request, context): context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") - def BatchCreateJobs(self, request, context): - """Begins executing a batch create jobs operation. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def BatchUpdateJobs(self, request, context): - """Begins executing a batch update jobs operation. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - def add_JobServiceServicer_to_server(servicer, server): rpc_method_handlers = { @@ -184,6 +184,11 @@ def add_JobServiceServicer_to_server(servicer, server): request_deserializer=google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_job__service__pb2.CreateJobRequest.FromString, response_serializer=google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_job__pb2.Job.SerializeToString, ), + "BatchCreateJobs": grpc.unary_unary_rpc_method_handler( + servicer.BatchCreateJobs, + request_deserializer=google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_job__service__pb2.BatchCreateJobsRequest.FromString, + response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ), "GetJob": grpc.unary_unary_rpc_method_handler( servicer.GetJob, request_deserializer=google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_job__service__pb2.GetJobRequest.FromString, @@ -194,21 +199,26 @@ def add_JobServiceServicer_to_server(servicer, server): request_deserializer=google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_job__service__pb2.UpdateJobRequest.FromString, response_serializer=google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_job__pb2.Job.SerializeToString, ), + "BatchUpdateJobs": grpc.unary_unary_rpc_method_handler( + servicer.BatchUpdateJobs, + request_deserializer=google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_job__service__pb2.BatchUpdateJobsRequest.FromString, + response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ), "DeleteJob": grpc.unary_unary_rpc_method_handler( servicer.DeleteJob, request_deserializer=google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_job__service__pb2.DeleteJobRequest.FromString, response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, ), - "ListJobs": grpc.unary_unary_rpc_method_handler( - servicer.ListJobs, - 
request_deserializer=google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_job__service__pb2.ListJobsRequest.FromString, - response_serializer=google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_job__service__pb2.ListJobsResponse.SerializeToString, - ), "BatchDeleteJobs": grpc.unary_unary_rpc_method_handler( servicer.BatchDeleteJobs, request_deserializer=google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_job__service__pb2.BatchDeleteJobsRequest.FromString, response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, ), + "ListJobs": grpc.unary_unary_rpc_method_handler( + servicer.ListJobs, + request_deserializer=google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_job__service__pb2.ListJobsRequest.FromString, + response_serializer=google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_job__service__pb2.ListJobsResponse.SerializeToString, + ), "SearchJobs": grpc.unary_unary_rpc_method_handler( servicer.SearchJobs, request_deserializer=google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_job__service__pb2.SearchJobsRequest.FromString, @@ -219,16 +229,6 @@ def add_JobServiceServicer_to_server(servicer, server): request_deserializer=google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_job__service__pb2.SearchJobsRequest.FromString, response_serializer=google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_job__service__pb2.SearchJobsResponse.SerializeToString, ), - "BatchCreateJobs": grpc.unary_unary_rpc_method_handler( - servicer.BatchCreateJobs, - request_deserializer=google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_job__service__pb2.BatchCreateJobsRequest.FromString, - response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, - ), - "BatchUpdateJobs": grpc.unary_unary_rpc_method_handler( - servicer.BatchUpdateJobs, - request_deserializer=google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_job__service__pb2.BatchUpdateJobsRequest.FromString, - 
response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, - ), } generic_handler = grpc.method_handlers_generic_handler( "google.cloud.talent.v4beta1.JobService", rpc_method_handlers diff --git a/talent/google/cloud/talent_v4beta1/proto/profile.proto b/talent/google/cloud/talent_v4beta1/proto/profile.proto index 819f13a62e62..9d3c0a696f61 100644 --- a/talent/google/cloud/talent_v4beta1/proto/profile.proto +++ b/talent/google/cloud/talent_v4beta1/proto/profile.proto @@ -43,17 +43,17 @@ message Profile { // // The format is // "projects/{project_id}/tenants/{tenant_id}/profiles/{profile_id}", - // for example, "projects/api-test-project/tenants/foo/profiles/bar". + // for example, "projects/foo/tenants/bar/profiles/baz". string name = 1; - // Optional. Profile's id in client system if available. This value is not - // required to be unique to each profile. However, providing unique values - // makes it easier to specify individual profiles when filing support tickets. + // Profile's id in client system, if available. This value is unique for each + // profile inside a tenant. An error is thrown if another profile with the + // same external_id is created. // // The maximum number of bytes allowed is 100. string external_id = 2; - // Optional. The source description indicating where the profile is acquired. + // The source description indicating where the profile is acquired. // // For example, if a candidate profile is acquired from a resume, the user can // input "resume" here to indicate the source. @@ -61,14 +61,13 @@ message Profile { // The maximum number of bytes allowed is 100. string source = 3; - // Optional. The URI set by clients that links to this profile's client-side - // copy. + // The URI set by clients that links to this profile's client-side copy. // // The maximum number of bytes allowed is 4000. string uri = 4; - // Optional. 
The cluster id of the profile to associate with other profile(s) - // for the same candidate. + // The cluster id of the profile to associate with other profile(s) for the + // same candidate. // // This field should be generated by the customer. If a value is not provided, // a random UUID is assigned to this field of the profile. @@ -82,24 +81,58 @@ message Profile { // referring to the same candidate. string group_id = 5; - // Optional. Indicates the hirable status of the candidate. + // Indicates the hirable status of the candidate. google.protobuf.BoolValue is_hirable = 6; - // Optional. The timestamp when the profile was first created at this source. + // The timestamp when the profile was first created at this source. google.protobuf.Timestamp create_time = 7; - // Optional. The timestamp when the profile was last updated at this source. + // The timestamp when the profile was last updated at this source. google.protobuf.Timestamp update_time = 8; - // Optional. The resume representing this profile. + // The timestamp when the profile was last updated as a result of a direct or + // indirect action by a candidate. + // + // These actions include: + // + // * Direct actions such as the candidate submitting a new resume as part of a + // job application to the agency, using a self-service tool such as a website + // to update their profile, and so on. + // * Indirect actions by the candidate such as uploading a resume to a job + // board that is collected by the agency through a feed, providing a resume to + // a recruiter who then uploads it into the ATS, and so on. + // * Updates made to the candidate's profile by the recruiter as a result of + // interacting with the candidate (for example adding a skill or work + // preference, and so on). Changes to + // [recruiting_notes][google.cloud.talent.v4beta1.Profile.recruiting_notes] + // are specifically excluded from this action type. 
+ // + // Note: + // [candidate_update_time][google.cloud.talent.v4beta1.Profile.candidate_update_time] + // must be greater than or equal to + // [resume_update_time][google.cloud.talent.v4beta1.Profile.resume_update_time] + // or an error is thrown. + google.protobuf.Timestamp candidate_update_time = 67; + + // The timestamp when the candidate's resume was added or updated on the + // candidate's profile. Whether that resume was directly uploaded by a + // candidate, pulled from a 3rd party job board feed, added by a recruiter, + // and so on. + // + // If this field is updated, it's expected that + // [resume][google.cloud.talent.v4beta1.Profile.resume] is provided in the + // create or update calls. + google.protobuf.Timestamp resume_update_time = 68; + + // The resume representing this profile. Resume resume = 53; - // Optional. The names of the candidate this profile references. + // The names of the candidate this profile references. // // Currently only one person name is supported. repeated PersonName person_names = 11; - // Optional. The candidate's postal addresses. It's highly recommended to + // The candidate's postal addresses. It's highly recommended to // input this information as accurately as possible to help improve search // quality. Here are some recommendations: // @@ -129,16 +162,16 @@ message Profile { // not set. repeated Address addresses = 12; - // Optional. The candidate's email addresses. + // The candidate's email addresses. repeated Email email_addresses = 13; - // Optional. The candidate's phone number(s). + // The candidate's phone number(s). repeated Phone phone_numbers = 14; - // Optional. The candidate's personal URIs. + // The candidate's personal URIs. repeated PersonalUri personal_uris = 15; - // Optional. 
Available contact information besides + // Available contact information besides // [addresses][google.cloud.talent.v4beta1.Profile.addresses], // [email_addresses][google.cloud.talent.v4beta1.Profile.email_addresses], // [phone_numbers][google.cloud.talent.v4beta1.Profile.phone_numbers] and @@ -146,9 +179,9 @@ message Profile { // example, Hang-out, Skype. repeated AdditionalContactInfo additional_contact_info = 16; - // Optional. The employment history records of the candidate. It's highly - // recommended to input this information as accurately as possible to help - // improve search quality. Here are some recommendations: + // The employment history records of the candidate. It's highly recommended + // to input this information as accurately as possible to help improve search + // quality. Here are some recommendations: // // * Specify the start and end dates of the employment records. // * List different employment types separately, no matter how minor the @@ -163,9 +196,9 @@ message Profile { // The limitation for max number of employment records is 100. repeated EmploymentRecord employment_records = 17; - // Optional. The education history record of the candidate. It's highly - // recommended to input this information as accurately as possible to help - // improve search quality. Here are some recommendations: + // The education history record of the candidate. It's highly recommended to + // input this information as accurately as possible to help improve search + // quality. Here are some recommendations: // // * Specify the start and end dates of the education records. // * List each education type separately, no matter how minor the change is. @@ -179,40 +212,40 @@ message Profile { // The limitation for max number of education records is 100. repeated EducationRecord education_records = 18; - // Optional. The skill set of the candidate. It's highly recommended to - // provide as much information as possible to help improve the search quality. 
+ // The skill set of the candidate. It's highly recommended to provide as + // much information as possible to help improve the search quality. // // The limitation for max number of skills is 500. repeated Skill skills = 19; - // Optional. The individual or collaborative activities which the candidate - // has participated in, for example, open-source projects, class assignments - // that aren't listed in + // The individual or collaborative activities which the candidate has + // participated in, for example, open-source projects, class assignments that + // aren't listed in // [employment_records][google.cloud.talent.v4beta1.Profile.employment_records]. // // The limitation for max number of activities is 50. repeated Activity activities = 20; - // Optional. The publications published by the candidate. + // The publications published by the candidate. // // The limitation for max number of publications is 50. repeated Publication publications = 21; - // Optional. The patents acquired by the candidate. + // The patents acquired by the candidate. repeated Patent patents = 22; - // Optional. The certifications acquired by the candidate. + // The certifications acquired by the candidate. repeated Certification certifications = 23; // Output only. The resource names of the candidate's applications. - repeated string applications = 47; + repeated string applications = 47 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The resource names of the candidate's assignments. - repeated string assignments = 48; + repeated string assignments = 48 [(google.api.field_behavior) = OUTPUT_ONLY]; - // Optional. A map of fields to hold both filterable and non-filterable custom - // profile attributes that aren't covered by the provided structured fields. 
- // See [CustomAttribute][google.cloud.talent.v4beta1.CustomAttribute] for more + // A map of fields to hold both filterable and non-filterable custom profile + // attributes that aren't covered by the provided structured fields. See + // [CustomAttribute][google.cloud.talent.v4beta1.CustomAttribute] for more // details. // // At most 100 filterable and at most 100 unfilterable keys are supported. If @@ -234,12 +267,16 @@ message Profile { // Output only. Indicates if a summarized profile was created as part of the // profile creation API call. This flag does not indicate whether a profile is // searchable or not. - bool processed = 27; + bool processed = 27 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Keyword snippet shows how the search result is related to a // search query. This is only returned in // [SearchProfilesResponse][google.cloud.talent.v4beta1.SearchProfilesResponse]. - string keyword_snippet = 28; + string keyword_snippet = 28 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. Candidate's availability signals. + repeated AvailabilitySignal availability_signals = 70 + [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Derived locations of the profile, resolved from // [Profile.addresses][google.cloud.talent.v4beta1.Profile.addresses]. @@ -252,6 +289,28 @@ message Profile { [(google.api.field_behavior) = OUTPUT_ONLY]; } +// Candidate availability signal. +message AvailabilitySignal { + // Type of signal. + AvailabilitySignalType type = 1; + + // Timestamp of when the given availability activity last happened. + google.protobuf.Timestamp last_update_time = 2; + + // Indicates if the + // [last_update_time][google.cloud.talent.v4beta1.AvailabilitySignal.last_update_time] + // is within + // [AvailabilityFilter.range][google.cloud.talent.v4beta1.AvailabilityFilter.range]. 
+ // + // Returned only in a search response when there is an + // [AvailabilityFilter][google.cloud.talent.v4beta1.AvailabilityFilter] in + // [ProfileQuery.availability_filters][google.cloud.talent.v4beta1.ProfileQuery.availability_filters] + // where + // [signal_type][google.cloud.talent.v4beta1.AvailabilityFilter.signal_type] + // matches [type][google.cloud.talent.v4beta1.AvailabilitySignal.type]. + google.protobuf.BoolValue filter_satisfied = 3; +} + // Resource that represents a resume. message Resume { // The format of a structured resume. @@ -268,7 +327,7 @@ message Resume { OTHER_RESUME_TYPE = 2; } - // Optional. Users can create a profile with only this field field, if + // Users can create a profile with only this field field, if // [resume_type][google.cloud.talent.v4beta1.Resume.resume_type] is // [HRXML][google.cloud.talent.v4beta1.Resume.ResumeType.HRXML]. For example, // the API parses this field and creates a profile with all structured fields @@ -282,7 +341,7 @@ message Resume { // SLA. string structured_resume = 1; - // Optional. The format of + // The format of // [structured_resume][google.cloud.talent.v4beta1.Resume.structured_resume]. ResumeType resume_type = 2; } @@ -291,7 +350,7 @@ message Resume { message PersonName { // Resource that represents a person's structured name. message PersonStructuredName { - // Optional. Given/first name. + // Given/first name. // // It's derived from // [formatted_name][google.cloud.talent.v4beta1.PersonName.formatted_name] @@ -300,12 +359,12 @@ message PersonName { // Number of characters allowed is 100. string given_name = 1; - // Optional. Preferred given/first name or nickname. + // Preferred given/first name or nickname. // // Number of characters allowed is 100. string preferred_name = 6; - // Optional. Middle initial. + // Middle initial. 
// // It's derived from // [formatted_name][google.cloud.talent.v4beta1.PersonName.formatted_name] @@ -314,7 +373,7 @@ message PersonName { // Number of characters allowed is 20. string middle_initial = 2; - // Optional. Family/last name. + // Family/last name. // // It's derived from // [formatted_name][google.cloud.talent.v4beta1.PersonName.formatted_name] @@ -323,12 +382,12 @@ message PersonName { // Number of characters allowed is 100. string family_name = 3; - // Optional. Suffixes. + // Suffixes. // // Number of characters allowed is 20. repeated string suffixes = 4; - // Optional. Prefixes. + // Prefixes. // // Number of characters allowed is 20. repeated string prefixes = 5; @@ -338,18 +397,17 @@ message PersonName { // [formatted_name][google.cloud.talent.v4beta1.PersonName.formatted_name] or // [structured_name][google.cloud.talent.v4beta1.PersonName.structured_name]. oneof person_name { - // Optional. A string represents a person's full name. For example, "Dr. - // John Smith". + // A string represents a person's full name. For example, "Dr. John Smith". // // Number of characters allowed is 100. string formatted_name = 1; - // Optional. A person's name in a structured way (last name, first name, - // suffix, and so on.) + // A person's name in a structured way (last name, first name, suffix, and + // so on.) PersonStructuredName structured_name = 2; } - // Optional. Preferred name for the person. This field is ignored if + // Preferred name for the person. This field is ignored if // [structured_name][google.cloud.talent.v4beta1.PersonName.structured_name] // is provided. // @@ -359,7 +417,7 @@ message PersonName { // Resource that represents a address. message Address { - // Optional. The usage of the address. For example, SCHOOL, WORK, PERSONAL. + // The usage of the address. For example, SCHOOL, WORK, PERSONAL. ContactInfoUsage usage = 1; // The address of a person. 
It can be one of @@ -367,7 +425,7 @@ message Address { // or // [structured_address][google.cloud.talent.v4beta1.Address.structured_address]. oneof address { - // Optional. Unstructured address. + // Unstructured address. // // For example, "1600 Amphitheatre Pkwy, Mountain View, CA 94043", // "Sunnyvale, California". @@ -375,22 +433,21 @@ message Address { // Number of characters allowed is 100. string unstructured_address = 2; - // Optional. Structured address that contains street address, city, state, - // country, and so on. + // Structured address that contains street address, city, state, country, + // and so on. google.type.PostalAddress structured_address = 3; } - // Optional. Indicates if it's the person's current address. + // Indicates if it's the person's current address. google.protobuf.BoolValue current = 4; } // Resource that represents a person's email address. message Email { - // Optional. The usage of the email address. For example, SCHOOL, WORK, - // PERSONAL. + // The usage of the email address. For example, SCHOOL, WORK, PERSONAL. ContactInfoUsage usage = 1; - // Optional. Email address. + // Email address. // // Number of characters allowed is 4,000. string email_address = 2; @@ -438,13 +495,13 @@ message Phone { MOBILE_OR_LANDLINE = 9; } - // Optional. The usage of the phone. For example, SCHOOL, WORK, PERSONAL. + // The usage of the phone. For example, SCHOOL, WORK, PERSONAL. ContactInfoUsage usage = 1; - // Optional. The phone type. For example, LANDLINE, MOBILE, FAX. + // The phone type. For example, LANDLINE, MOBILE, FAX. PhoneType type = 2; - // Optional. Phone number. + // Phone number. // // Any phone formats are supported and only exact matches are performed on // searches. For example, if a phone number in profile is provided in the @@ -454,8 +511,7 @@ message Phone { // Number of characters allowed is 20. string number = 3; - // Optional. When this number is available. Any descriptive string is - // expected. 
+ // When this number is available. Any descriptive string is expected. // // Number of characters allowed is 100. string when_available = 4; @@ -463,7 +519,7 @@ message Phone { // Resource that represents a valid URI for a personal use. message PersonalUri { - // Optional. The personal URI. + // The personal URI. // // Number of characters allowed is 4,000. string uri = 1; @@ -472,18 +528,17 @@ message PersonalUri { // Resource that represents contact information other than phone, email, // URI and addresses. message AdditionalContactInfo { - // Optional. The usage of this contact method. For example, SCHOOL, WORK, - // PERSONAL. + // The usage of this contact method. For example, SCHOOL, WORK, PERSONAL. ContactInfoUsage usage = 1; - // Optional. The name of the contact method. + // The name of the contact method. // // For example, "hangout", "skype". // // Number of characters allowed is 100. string name = 2; - // Optional. The contact id. + // The contact id. // // Number of characters allowed is 100. string contact_id = 3; @@ -491,48 +546,48 @@ message AdditionalContactInfo { // Resource that represents an employment record of a candidate. message EmploymentRecord { - // Optional. Start date of the employment. + // Start date of the employment. google.type.Date start_date = 1; - // Optional. End date of the employment. + // End date of the employment. google.type.Date end_date = 2; - // Optional. The name of the employer company/organization. + // The name of the employer company/organization. // // For example, "Google", "Alphabet", and so on. // // Number of characters allowed is 250. string employer_name = 3; - // Optional. The division name of the employment. + // The division name of the employment. // // For example, division, department, client, and so on. // // Number of characters allowed is 100. string division_name = 4; - // Optional. The physical address of the employer. + // The physical address of the employer. Address address = 5; - // Optional. 
The job title of the employment. + // The job title of the employment. // // For example, "Software Engineer", "Data Scientist", and so on. // // Number of characters allowed is 250. string job_title = 6; - // Optional. The description of job content. + // The description of job content. // // Number of characters allowed is 100,000. string job_description = 7; - // Optional. If the jobs is a supervisor position. + // If the jobs is a supervisor position. google.protobuf.BoolValue is_supervisor = 8; - // Optional. If this employment is self-employed. + // If this employment is self-employed. google.protobuf.BoolValue is_self_employed = 9; - // Optional. If this employment is current. + // If this employment is current. google.protobuf.BoolValue is_current = 10; // Output only. The job title snippet shows how the @@ -540,42 +595,43 @@ message EmploymentRecord { // related to a search query. It's empty if the // [job_title][google.cloud.talent.v4beta1.EmploymentRecord.job_title] isn't // related to the search query. - string job_title_snippet = 11; + string job_title_snippet = 11 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The job description snippet shows how the // [job_description][google.cloud.talent.v4beta1.EmploymentRecord.job_description] // is related to a search query. It's empty if the // [job_description][google.cloud.talent.v4beta1.EmploymentRecord.job_description] // isn't related to the search query. - string job_description_snippet = 12; + string job_description_snippet = 12 + [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The employer name snippet shows how the // [employer_name][google.cloud.talent.v4beta1.EmploymentRecord.employer_name] // is related to a search query. It's empty if the // [employer_name][google.cloud.talent.v4beta1.EmploymentRecord.employer_name] // isn't related to the search query. 
- string employer_name_snippet = 13; + string employer_name_snippet = 13 [(google.api.field_behavior) = OUTPUT_ONLY]; } // Resource that represents an education record of a candidate. message EducationRecord { - // Optional. The start date of the education. + // The start date of the education. google.type.Date start_date = 1; - // Optional. The end date of the education. + // The end date of the education. google.type.Date end_date = 2; - // Optional. The expected graduation date if currently pursuing a degree. + // The expected graduation date if currently pursuing a degree. google.type.Date expected_graduation_date = 3; - // Optional. The name of the school or institution. + // The name of the school or institution. // // For example, "Stanford University", "UC Berkeley", and so on. // // Number of characters allowed is 250. string school_name = 4; - // Optional. The physical address of the education institution. + // The physical address of the education institution. Address address = 5; // The degree information. It can be one of @@ -583,23 +639,23 @@ message EducationRecord { // or // [structured_degree][google.cloud.talent.v4beta1.EducationRecord.structured_degree]. oneof degree { - // Optional. The full description of the degree. + // The full description of the degree. // // For example, "Master of Science in Computer Science", "B.S in Math". // // Number of characters allowed is 100. string degree_description = 6; - // Optional. The structured notation of the degree. + // The structured notation of the degree. Degree structured_degree = 7; } - // Optional. The description of the education. + // The description of the education. // // Number of characters allowed is 100,000. string description = 8; - // Optional. If this education is current. + // If this education is current. google.protobuf.BoolValue is_current = 9; // Output only. 
The school name snippet shows how the @@ -607,29 +663,29 @@ message EducationRecord { // related to a search query in search result. It's empty if the // [school_name][google.cloud.talent.v4beta1.EducationRecord.school_name] // isn't related to the search query. - string school_name_snippet = 10; + string school_name_snippet = 10 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The job description snippet shows how the // [Degree][google.cloud.talent.v4beta1.Degree] is related to a search query // in search result. It's empty if the // [Degree][google.cloud.talent.v4beta1.Degree] isn't related to the search // query. - string degree_snippet = 11; + string degree_snippet = 11 [(google.api.field_behavior) = OUTPUT_ONLY]; } // Resource that represents a degree pursuing or acquired by a candidate. message Degree { - // Optional. ISCED degree type. + // ISCED degree type. DegreeType degree_type = 1; - // Optional. Full Degree name. + // Full Degree name. // // For example, "B.S.", "Master of Arts", and so on. // // Number of characters allowed is 100. string degree_name = 2; - // Optional. Fields of study for the degree. + // Fields of study for the degree. // // For example, "Computer science", "engineering". // @@ -641,35 +697,35 @@ message Degree { // in by a candidate, for example, an open-source project, a class assignment, // and so on. message Activity { - // Optional. Activity display name. + // Activity display name. // // Number of characters allowed is 100. string display_name = 1; - // Optional. Activity description. + // Activity description. // // Number of characters allowed is 100,000. string description = 2; - // Optional. Activity URI. + // Activity URI. // // Number of characters allowed is 4,000. string uri = 3; - // Optional. The first creation date of the activity. + // The first creation date of the activity. google.type.Date create_date = 4; - // Optional. The last update date of the activity. 
+ // The last update date of the activity. google.type.Date update_date = 5; - // Optional. A list of team members involved in this activity. + // A list of team members involved in this activity. // // Number of characters allowed is 100. // // The limitation for max number of team members is 50. repeated string team_members = 6; - // Optional. A list of skills used in this activity. + // A list of skills used in this activity. // // The limitation for max number of skills used is 50. repeated Skill skills_used = 7; @@ -679,64 +735,66 @@ message Activity { // related to a search query. It's empty if the // [display_name][google.cloud.talent.v4beta1.Activity.display_name] isn't // related to the search query. - string activity_name_snippet = 8; + string activity_name_snippet = 8 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Activity description snippet shows how the // [description][google.cloud.talent.v4beta1.Activity.description] is related // to a search query. It's empty if the // [description][google.cloud.talent.v4beta1.Activity.description] isn't // related to the search query. - string activity_description_snippet = 9; + string activity_description_snippet = 9 + [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Skill used snippet shows how the corresponding // [skills_used][google.cloud.talent.v4beta1.Activity.skills_used] are related // to a search query. It's empty if the corresponding // [skills_used][google.cloud.talent.v4beta1.Activity.skills_used] are not // related to the search query. - repeated string skills_used_snippet = 10; + repeated string skills_used_snippet = 10 + [(google.api.field_behavior) = OUTPUT_ONLY]; } // Resource that represents a publication resource of a candidate. message Publication { - // Optional. A list of author names. + // A list of author names. // // Number of characters allowed is 100. repeated string authors = 1; - // Optional. The title of the publication. + // The title of the publication. 
// // Number of characters allowed is 100. string title = 2; - // Optional. The description of the publication. + // The description of the publication. // // Number of characters allowed is 100,000. string description = 3; - // Optional. The journal name of the publication. + // The journal name of the publication. // // Number of characters allowed is 100. string journal = 4; - // Optional. Volume number. + // Volume number. // // Number of characters allowed is 100. string volume = 5; - // Optional. The publisher of the journal. + // The publisher of the journal. // // Number of characters allowed is 100. string publisher = 6; - // Optional. The publication date. + // The publication date. google.type.Date publication_date = 7; - // Optional. The publication type. + // The publication type. // // Number of characters allowed is 100. string publication_type = 8; - // Optional. ISBN number. + // ISBN number. // // Number of characters allowed is 100. string isbn = 9; @@ -744,42 +802,42 @@ message Publication { // Resource that represents the patent acquired by a candidate. message Patent { - // Optional. Name of the patent. + // Name of the patent. // // Number of characters allowed is 100. string display_name = 1; - // Optional. A list of inventors' names. + // A list of inventors' names. // // Number of characters allowed for each is 100. repeated string inventors = 2; - // Optional. The status of the patent. + // The status of the patent. // // Number of characters allowed is 100. string patent_status = 3; - // Optional. The date the last time the status of the patent was checked. + // The date the last time the status of the patent was checked. google.type.Date patent_status_date = 4; - // Optional. The date that the patent was filed. + // The date that the patent was filed. google.type.Date patent_filing_date = 5; - // Optional. The name of the patent office. + // The name of the patent office. // // Number of characters allowed is 100. 
string patent_office = 6; - // Optional. The number of the patent. + // The number of the patent. // // Number of characters allowed is 100. string patent_number = 7; - // Optional. The description of the patent. + // The description of the patent. // // Number of characters allowed is 100,000. string patent_description = 8; - // Optional. The skills used in this patent. + // The skills used in this patent. repeated Skill skills_used = 9; } diff --git a/talent/google/cloud/talent_v4beta1/proto/profile_pb2.py b/talent/google/cloud/talent_v4beta1/proto/profile_pb2.py index 195bd143568d..0b7cfb9ce647 100644 --- a/talent/google/cloud/talent_v4beta1/proto/profile_pb2.py +++ b/talent/google/cloud/talent_v4beta1/proto/profile_pb2.py @@ -35,7 +35,7 @@ "\n\037com.google.cloud.talent.v4beta1B\024ProfileResourceProtoP\001ZAgoogle.golang.org/genproto/googleapis/cloud/talent/v4beta1;talent\242\002\003CTS" ), serialized_pb=_b( - '\n/google/cloud/talent_v4beta1/proto/profile.proto\x12\x1bgoogle.cloud.talent.v4beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a.google/cloud/talent_v4beta1/proto/common.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x16google/type/date.proto\x1a google/type/postal_address.proto"\xc1\x0b\n\x07Profile\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x65xternal_id\x18\x02 \x01(\t\x12\x0e\n\x06source\x18\x03 \x01(\t\x12\x0b\n\x03uri\x18\x04 \x01(\t\x12\x10\n\x08group_id\x18\x05 \x01(\t\x12.\n\nis_hirable\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12/\n\x0b\x63reate_time\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x33\n\x06resume\x18\x35 \x01(\x0b\x32#.google.cloud.talent.v4beta1.Resume\x12=\n\x0cperson_names\x18\x0b \x03(\x0b\x32\'.google.cloud.talent.v4beta1.PersonName\x12\x37\n\taddresses\x18\x0c 
\x03(\x0b\x32$.google.cloud.talent.v4beta1.Address\x12;\n\x0f\x65mail_addresses\x18\r \x03(\x0b\x32".google.cloud.talent.v4beta1.Email\x12\x39\n\rphone_numbers\x18\x0e \x03(\x0b\x32".google.cloud.talent.v4beta1.Phone\x12?\n\rpersonal_uris\x18\x0f \x03(\x0b\x32(.google.cloud.talent.v4beta1.PersonalUri\x12S\n\x17\x61\x64\x64itional_contact_info\x18\x10 \x03(\x0b\x32\x32.google.cloud.talent.v4beta1.AdditionalContactInfo\x12I\n\x12\x65mployment_records\x18\x11 \x03(\x0b\x32-.google.cloud.talent.v4beta1.EmploymentRecord\x12G\n\x11\x65\x64ucation_records\x18\x12 \x03(\x0b\x32,.google.cloud.talent.v4beta1.EducationRecord\x12\x32\n\x06skills\x18\x13 \x03(\x0b\x32".google.cloud.talent.v4beta1.Skill\x12\x39\n\nactivities\x18\x14 \x03(\x0b\x32%.google.cloud.talent.v4beta1.Activity\x12>\n\x0cpublications\x18\x15 \x03(\x0b\x32(.google.cloud.talent.v4beta1.Publication\x12\x34\n\x07patents\x18\x16 \x03(\x0b\x32#.google.cloud.talent.v4beta1.Patent\x12\x42\n\x0e\x63\x65rtifications\x18\x17 \x03(\x0b\x32*.google.cloud.talent.v4beta1.Certification\x12\x14\n\x0c\x61pplications\x18/ \x03(\t\x12\x13\n\x0b\x61ssignments\x18\x30 \x03(\t\x12U\n\x11\x63ustom_attributes\x18\x1a \x03(\x0b\x32:.google.cloud.talent.v4beta1.Profile.CustomAttributesEntry\x12\x11\n\tprocessed\x18\x1b \x01(\x08\x12\x17\n\x0fkeyword_snippet\x18\x1c \x01(\t\x12\x45\n\x11\x64\x65rived_addresses\x18@ \x03(\x0b\x32%.google.cloud.talent.v4beta1.LocationB\x03\xe0\x41\x03\x1a\x65\n\x15\x43ustomAttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12;\n\x05value\x18\x02 \x01(\x0b\x32,.google.cloud.talent.v4beta1.CustomAttribute:\x02\x38\x01"\xb5\x01\n\x06Resume\x12\x19\n\x11structured_resume\x18\x01 \x01(\t\x12\x43\n\x0bresume_type\x18\x02 \x01(\x0e\x32..google.cloud.talent.v4beta1.Resume.ResumeType"K\n\nResumeType\x12\x1b\n\x17RESUME_TYPE_UNSPECIFIED\x10\x00\x12\t\n\x05HRXML\x10\x01\x12\x15\n\x11OTHER_RESUME_TYPE\x10\x02"\xbc\x02\n\nPersonName\x12\x18\n\x0e\x66ormatted_name\x18\x01 
\x01(\tH\x00\x12W\n\x0fstructured_name\x18\x02 \x01(\x0b\x32<.google.cloud.talent.v4beta1.PersonName.PersonStructuredNameH\x00\x12\x16\n\x0epreferred_name\x18\x03 \x01(\t\x1a\x93\x01\n\x14PersonStructuredName\x12\x12\n\ngiven_name\x18\x01 \x01(\t\x12\x16\n\x0epreferred_name\x18\x06 \x01(\t\x12\x16\n\x0emiddle_initial\x18\x02 \x01(\t\x12\x13\n\x0b\x66\x61mily_name\x18\x03 \x01(\t\x12\x10\n\x08suffixes\x18\x04 \x03(\t\x12\x10\n\x08prefixes\x18\x05 \x03(\tB\r\n\x0bperson_name"\xd9\x01\n\x07\x41\x64\x64ress\x12<\n\x05usage\x18\x01 \x01(\x0e\x32-.google.cloud.talent.v4beta1.ContactInfoUsage\x12\x1e\n\x14unstructured_address\x18\x02 \x01(\tH\x00\x12\x38\n\x12structured_address\x18\x03 \x01(\x0b\x32\x1a.google.type.PostalAddressH\x00\x12+\n\x07\x63urrent\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.BoolValueB\t\n\x07\x61\x64\x64ress"\\\n\x05\x45mail\x12<\n\x05usage\x18\x01 \x01(\x0e\x32-.google.cloud.talent.v4beta1.ContactInfoUsage\x12\x15\n\remail_address\x18\x02 \x01(\t"\xcf\x02\n\x05Phone\x12<\n\x05usage\x18\x01 \x01(\x0e\x32-.google.cloud.talent.v4beta1.ContactInfoUsage\x12:\n\x04type\x18\x02 \x01(\x0e\x32,.google.cloud.talent.v4beta1.Phone.PhoneType\x12\x0e\n\x06number\x18\x03 \x01(\t\x12\x16\n\x0ewhen_available\x18\x04 \x01(\t"\xa3\x01\n\tPhoneType\x12\x1a\n\x16PHONE_TYPE_UNSPECIFIED\x10\x00\x12\x0c\n\x08LANDLINE\x10\x01\x12\n\n\x06MOBILE\x10\x02\x12\x07\n\x03\x46\x41X\x10\x03\x12\t\n\x05PAGER\x10\x04\x12\x0e\n\nTTY_OR_TDD\x10\x05\x12\r\n\tVOICEMAIL\x10\x06\x12\x0b\n\x07VIRTUAL\x10\x07\x12\x08\n\x04VOIP\x10\x08\x12\x16\n\x12MOBILE_OR_LANDLINE\x10\t"\x1a\n\x0bPersonalUri\x12\x0b\n\x03uri\x18\x01 \x01(\t"w\n\x15\x41\x64\x64itionalContactInfo\x12<\n\x05usage\x18\x01 \x01(\x0e\x32-.google.cloud.talent.v4beta1.ContactInfoUsage\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x12\n\ncontact_id\x18\x03 \x01(\t"\xe3\x03\n\x10\x45mploymentRecord\x12%\n\nstart_date\x18\x01 \x01(\x0b\x32\x11.google.type.Date\x12#\n\x08\x65nd_date\x18\x02 
\x01(\x0b\x32\x11.google.type.Date\x12\x15\n\remployer_name\x18\x03 \x01(\t\x12\x15\n\rdivision_name\x18\x04 \x01(\t\x12\x35\n\x07\x61\x64\x64ress\x18\x05 \x01(\x0b\x32$.google.cloud.talent.v4beta1.Address\x12\x11\n\tjob_title\x18\x06 \x01(\t\x12\x17\n\x0fjob_description\x18\x07 \x01(\t\x12\x31\n\ris_supervisor\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x34\n\x10is_self_employed\x18\t \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12.\n\nis_current\x18\n \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x19\n\x11job_title_snippet\x18\x0b \x01(\t\x12\x1f\n\x17job_description_snippet\x18\x0c \x01(\t\x12\x1d\n\x15\x65mployer_name_snippet\x18\r \x01(\t"\xc2\x03\n\x0f\x45\x64ucationRecord\x12%\n\nstart_date\x18\x01 \x01(\x0b\x32\x11.google.type.Date\x12#\n\x08\x65nd_date\x18\x02 \x01(\x0b\x32\x11.google.type.Date\x12\x33\n\x18\x65xpected_graduation_date\x18\x03 \x01(\x0b\x32\x11.google.type.Date\x12\x13\n\x0bschool_name\x18\x04 \x01(\t\x12\x35\n\x07\x61\x64\x64ress\x18\x05 \x01(\x0b\x32$.google.cloud.talent.v4beta1.Address\x12\x1c\n\x12\x64\x65gree_description\x18\x06 \x01(\tH\x00\x12@\n\x11structured_degree\x18\x07 \x01(\x0b\x32#.google.cloud.talent.v4beta1.DegreeH\x00\x12\x13\n\x0b\x64\x65scription\x18\x08 \x01(\t\x12.\n\nis_current\x18\t \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x1b\n\x13school_name_snippet\x18\n \x01(\t\x12\x16\n\x0e\x64\x65gree_snippet\x18\x0b \x01(\tB\x08\n\x06\x64\x65gree"t\n\x06\x44\x65gree\x12<\n\x0b\x64\x65gree_type\x18\x01 \x01(\x0e\x32\'.google.cloud.talent.v4beta1.DegreeType\x12\x13\n\x0b\x64\x65gree_name\x18\x02 \x01(\t\x12\x17\n\x0f\x66ields_of_study\x18\x03 \x03(\t"\xc3\x02\n\x08\x41\x63tivity\x12\x14\n\x0c\x64isplay_name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0b\n\x03uri\x18\x03 \x01(\t\x12&\n\x0b\x63reate_date\x18\x04 \x01(\x0b\x32\x11.google.type.Date\x12&\n\x0bupdate_date\x18\x05 \x01(\x0b\x32\x11.google.type.Date\x12\x14\n\x0cteam_members\x18\x06 \x03(\t\x12\x37\n\x0bskills_used\x18\x07 
\x03(\x0b\x32".google.cloud.talent.v4beta1.Skill\x12\x1d\n\x15\x61\x63tivity_name_snippet\x18\x08 \x01(\t\x12$\n\x1c\x61\x63tivity_description_snippet\x18\t \x01(\t\x12\x1b\n\x13skills_used_snippet\x18\n \x03(\t"\xcb\x01\n\x0bPublication\x12\x0f\n\x07\x61uthors\x18\x01 \x03(\t\x12\r\n\x05title\x18\x02 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12\x0f\n\x07journal\x18\x04 \x01(\t\x12\x0e\n\x06volume\x18\x05 \x01(\t\x12\x11\n\tpublisher\x18\x06 \x01(\t\x12+\n\x10publication_date\x18\x07 \x01(\x0b\x32\x11.google.type.Date\x12\x18\n\x10publication_type\x18\x08 \x01(\t\x12\x0c\n\x04isbn\x18\t \x01(\t"\xa9\x02\n\x06Patent\x12\x14\n\x0c\x64isplay_name\x18\x01 \x01(\t\x12\x11\n\tinventors\x18\x02 \x03(\t\x12\x15\n\rpatent_status\x18\x03 \x01(\t\x12-\n\x12patent_status_date\x18\x04 \x01(\x0b\x32\x11.google.type.Date\x12-\n\x12patent_filing_date\x18\x05 \x01(\x0b\x32\x11.google.type.Date\x12\x15\n\rpatent_office\x18\x06 \x01(\t\x12\x15\n\rpatent_number\x18\x07 \x01(\t\x12\x1a\n\x12patent_description\x18\x08 \x01(\t\x12\x37\n\x0bskills_used\x18\t \x03(\x0b\x32".google.cloud.talent.v4beta1.SkillB\x82\x01\n\x1f\x63om.google.cloud.talent.v4beta1B\x14ProfileResourceProtoP\x01ZAgoogle.golang.org/genproto/googleapis/cloud/talent/v4beta1;talent\xa2\x02\x03\x43TSb\x06proto3' + '\n/google/cloud/talent_v4beta1/proto/profile.proto\x12\x1bgoogle.cloud.talent.v4beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a.google/cloud/talent_v4beta1/proto/common.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x16google/type/date.proto\x1a google/type/postal_address.proto"\x9c\r\n\x07Profile\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x65xternal_id\x18\x02 \x01(\t\x12\x0e\n\x06source\x18\x03 \x01(\t\x12\x0b\n\x03uri\x18\x04 \x01(\t\x12\x10\n\x08group_id\x18\x05 \x01(\t\x12.\n\nis_hirable\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12/\n\x0b\x63reate_time\x18\x07 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x39\n\x15\x63\x61ndidate_update_time\x18\x43 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x36\n\x12resume_update_time\x18\x44 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x33\n\x06resume\x18\x35 \x01(\x0b\x32#.google.cloud.talent.v4beta1.Resume\x12=\n\x0cperson_names\x18\x0b \x03(\x0b\x32\'.google.cloud.talent.v4beta1.PersonName\x12\x37\n\taddresses\x18\x0c \x03(\x0b\x32$.google.cloud.talent.v4beta1.Address\x12;\n\x0f\x65mail_addresses\x18\r \x03(\x0b\x32".google.cloud.talent.v4beta1.Email\x12\x39\n\rphone_numbers\x18\x0e \x03(\x0b\x32".google.cloud.talent.v4beta1.Phone\x12?\n\rpersonal_uris\x18\x0f \x03(\x0b\x32(.google.cloud.talent.v4beta1.PersonalUri\x12S\n\x17\x61\x64\x64itional_contact_info\x18\x10 \x03(\x0b\x32\x32.google.cloud.talent.v4beta1.AdditionalContactInfo\x12I\n\x12\x65mployment_records\x18\x11 \x03(\x0b\x32-.google.cloud.talent.v4beta1.EmploymentRecord\x12G\n\x11\x65\x64ucation_records\x18\x12 \x03(\x0b\x32,.google.cloud.talent.v4beta1.EducationRecord\x12\x32\n\x06skills\x18\x13 \x03(\x0b\x32".google.cloud.talent.v4beta1.Skill\x12\x39\n\nactivities\x18\x14 \x03(\x0b\x32%.google.cloud.talent.v4beta1.Activity\x12>\n\x0cpublications\x18\x15 \x03(\x0b\x32(.google.cloud.talent.v4beta1.Publication\x12\x34\n\x07patents\x18\x16 \x03(\x0b\x32#.google.cloud.talent.v4beta1.Patent\x12\x42\n\x0e\x63\x65rtifications\x18\x17 \x03(\x0b\x32*.google.cloud.talent.v4beta1.Certification\x12\x19\n\x0c\x61pplications\x18/ \x03(\tB\x03\xe0\x41\x03\x12\x18\n\x0b\x61ssignments\x18\x30 \x03(\tB\x03\xe0\x41\x03\x12U\n\x11\x63ustom_attributes\x18\x1a \x03(\x0b\x32:.google.cloud.talent.v4beta1.Profile.CustomAttributesEntry\x12\x16\n\tprocessed\x18\x1b \x01(\x08\x42\x03\xe0\x41\x03\x12\x1c\n\x0fkeyword_snippet\x18\x1c \x01(\tB\x03\xe0\x41\x03\x12R\n\x14\x61vailability_signals\x18\x46 
\x03(\x0b\x32/.google.cloud.talent.v4beta1.AvailabilitySignalB\x03\xe0\x41\x03\x12\x45\n\x11\x64\x65rived_addresses\x18@ \x03(\x0b\x32%.google.cloud.talent.v4beta1.LocationB\x03\xe0\x41\x03\x1a\x65\n\x15\x43ustomAttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12;\n\x05value\x18\x02 \x01(\x0b\x32,.google.cloud.talent.v4beta1.CustomAttribute:\x02\x38\x01"\xc3\x01\n\x12\x41vailabilitySignal\x12\x41\n\x04type\x18\x01 \x01(\x0e\x32\x33.google.cloud.talent.v4beta1.AvailabilitySignalType\x12\x34\n\x10last_update_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x34\n\x10\x66ilter_satisfied\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.BoolValue"\xb5\x01\n\x06Resume\x12\x19\n\x11structured_resume\x18\x01 \x01(\t\x12\x43\n\x0bresume_type\x18\x02 \x01(\x0e\x32..google.cloud.talent.v4beta1.Resume.ResumeType"K\n\nResumeType\x12\x1b\n\x17RESUME_TYPE_UNSPECIFIED\x10\x00\x12\t\n\x05HRXML\x10\x01\x12\x15\n\x11OTHER_RESUME_TYPE\x10\x02"\xbc\x02\n\nPersonName\x12\x18\n\x0e\x66ormatted_name\x18\x01 \x01(\tH\x00\x12W\n\x0fstructured_name\x18\x02 \x01(\x0b\x32<.google.cloud.talent.v4beta1.PersonName.PersonStructuredNameH\x00\x12\x16\n\x0epreferred_name\x18\x03 \x01(\t\x1a\x93\x01\n\x14PersonStructuredName\x12\x12\n\ngiven_name\x18\x01 \x01(\t\x12\x16\n\x0epreferred_name\x18\x06 \x01(\t\x12\x16\n\x0emiddle_initial\x18\x02 \x01(\t\x12\x13\n\x0b\x66\x61mily_name\x18\x03 \x01(\t\x12\x10\n\x08suffixes\x18\x04 \x03(\t\x12\x10\n\x08prefixes\x18\x05 \x03(\tB\r\n\x0bperson_name"\xd9\x01\n\x07\x41\x64\x64ress\x12<\n\x05usage\x18\x01 \x01(\x0e\x32-.google.cloud.talent.v4beta1.ContactInfoUsage\x12\x1e\n\x14unstructured_address\x18\x02 \x01(\tH\x00\x12\x38\n\x12structured_address\x18\x03 \x01(\x0b\x32\x1a.google.type.PostalAddressH\x00\x12+\n\x07\x63urrent\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.BoolValueB\t\n\x07\x61\x64\x64ress"\\\n\x05\x45mail\x12<\n\x05usage\x18\x01 \x01(\x0e\x32-.google.cloud.talent.v4beta1.ContactInfoUsage\x12\x15\n\remail_address\x18\x02 
\x01(\t"\xcf\x02\n\x05Phone\x12<\n\x05usage\x18\x01 \x01(\x0e\x32-.google.cloud.talent.v4beta1.ContactInfoUsage\x12:\n\x04type\x18\x02 \x01(\x0e\x32,.google.cloud.talent.v4beta1.Phone.PhoneType\x12\x0e\n\x06number\x18\x03 \x01(\t\x12\x16\n\x0ewhen_available\x18\x04 \x01(\t"\xa3\x01\n\tPhoneType\x12\x1a\n\x16PHONE_TYPE_UNSPECIFIED\x10\x00\x12\x0c\n\x08LANDLINE\x10\x01\x12\n\n\x06MOBILE\x10\x02\x12\x07\n\x03\x46\x41X\x10\x03\x12\t\n\x05PAGER\x10\x04\x12\x0e\n\nTTY_OR_TDD\x10\x05\x12\r\n\tVOICEMAIL\x10\x06\x12\x0b\n\x07VIRTUAL\x10\x07\x12\x08\n\x04VOIP\x10\x08\x12\x16\n\x12MOBILE_OR_LANDLINE\x10\t"\x1a\n\x0bPersonalUri\x12\x0b\n\x03uri\x18\x01 \x01(\t"w\n\x15\x41\x64\x64itionalContactInfo\x12<\n\x05usage\x18\x01 \x01(\x0e\x32-.google.cloud.talent.v4beta1.ContactInfoUsage\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x12\n\ncontact_id\x18\x03 \x01(\t"\xf2\x03\n\x10\x45mploymentRecord\x12%\n\nstart_date\x18\x01 \x01(\x0b\x32\x11.google.type.Date\x12#\n\x08\x65nd_date\x18\x02 \x01(\x0b\x32\x11.google.type.Date\x12\x15\n\remployer_name\x18\x03 \x01(\t\x12\x15\n\rdivision_name\x18\x04 \x01(\t\x12\x35\n\x07\x61\x64\x64ress\x18\x05 \x01(\x0b\x32$.google.cloud.talent.v4beta1.Address\x12\x11\n\tjob_title\x18\x06 \x01(\t\x12\x17\n\x0fjob_description\x18\x07 \x01(\t\x12\x31\n\ris_supervisor\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x34\n\x10is_self_employed\x18\t \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12.\n\nis_current\x18\n \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12\x1e\n\x11job_title_snippet\x18\x0b \x01(\tB\x03\xe0\x41\x03\x12$\n\x17job_description_snippet\x18\x0c \x01(\tB\x03\xe0\x41\x03\x12"\n\x15\x65mployer_name_snippet\x18\r \x01(\tB\x03\xe0\x41\x03"\xcc\x03\n\x0f\x45\x64ucationRecord\x12%\n\nstart_date\x18\x01 \x01(\x0b\x32\x11.google.type.Date\x12#\n\x08\x65nd_date\x18\x02 \x01(\x0b\x32\x11.google.type.Date\x12\x33\n\x18\x65xpected_graduation_date\x18\x03 \x01(\x0b\x32\x11.google.type.Date\x12\x13\n\x0bschool_name\x18\x04 
\x01(\t\x12\x35\n\x07\x61\x64\x64ress\x18\x05 \x01(\x0b\x32$.google.cloud.talent.v4beta1.Address\x12\x1c\n\x12\x64\x65gree_description\x18\x06 \x01(\tH\x00\x12@\n\x11structured_degree\x18\x07 \x01(\x0b\x32#.google.cloud.talent.v4beta1.DegreeH\x00\x12\x13\n\x0b\x64\x65scription\x18\x08 \x01(\t\x12.\n\nis_current\x18\t \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12 \n\x13school_name_snippet\x18\n \x01(\tB\x03\xe0\x41\x03\x12\x1b\n\x0e\x64\x65gree_snippet\x18\x0b \x01(\tB\x03\xe0\x41\x03\x42\x08\n\x06\x64\x65gree"t\n\x06\x44\x65gree\x12<\n\x0b\x64\x65gree_type\x18\x01 \x01(\x0e\x32\'.google.cloud.talent.v4beta1.DegreeType\x12\x13\n\x0b\x64\x65gree_name\x18\x02 \x01(\t\x12\x17\n\x0f\x66ields_of_study\x18\x03 \x03(\t"\xd2\x02\n\x08\x41\x63tivity\x12\x14\n\x0c\x64isplay_name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0b\n\x03uri\x18\x03 \x01(\t\x12&\n\x0b\x63reate_date\x18\x04 \x01(\x0b\x32\x11.google.type.Date\x12&\n\x0bupdate_date\x18\x05 \x01(\x0b\x32\x11.google.type.Date\x12\x14\n\x0cteam_members\x18\x06 \x03(\t\x12\x37\n\x0bskills_used\x18\x07 \x03(\x0b\x32".google.cloud.talent.v4beta1.Skill\x12"\n\x15\x61\x63tivity_name_snippet\x18\x08 \x01(\tB\x03\xe0\x41\x03\x12)\n\x1c\x61\x63tivity_description_snippet\x18\t \x01(\tB\x03\xe0\x41\x03\x12 \n\x13skills_used_snippet\x18\n \x03(\tB\x03\xe0\x41\x03"\xcb\x01\n\x0bPublication\x12\x0f\n\x07\x61uthors\x18\x01 \x03(\t\x12\r\n\x05title\x18\x02 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12\x0f\n\x07journal\x18\x04 \x01(\t\x12\x0e\n\x06volume\x18\x05 \x01(\t\x12\x11\n\tpublisher\x18\x06 \x01(\t\x12+\n\x10publication_date\x18\x07 \x01(\x0b\x32\x11.google.type.Date\x12\x18\n\x10publication_type\x18\x08 \x01(\t\x12\x0c\n\x04isbn\x18\t \x01(\t"\xa9\x02\n\x06Patent\x12\x14\n\x0c\x64isplay_name\x18\x01 \x01(\t\x12\x11\n\tinventors\x18\x02 \x03(\t\x12\x15\n\rpatent_status\x18\x03 \x01(\t\x12-\n\x12patent_status_date\x18\x04 \x01(\x0b\x32\x11.google.type.Date\x12-\n\x12patent_filing_date\x18\x05 
\x01(\x0b\x32\x11.google.type.Date\x12\x15\n\rpatent_office\x18\x06 \x01(\t\x12\x15\n\rpatent_number\x18\x07 \x01(\t\x12\x1a\n\x12patent_description\x18\x08 \x01(\t\x12\x37\n\x0bskills_used\x18\t \x03(\x0b\x32".google.cloud.talent.v4beta1.SkillB\x82\x01\n\x1f\x63om.google.cloud.talent.v4beta1B\x14ProfileResourceProtoP\x01ZAgoogle.golang.org/genproto/googleapis/cloud/talent/v4beta1;talent\xa2\x02\x03\x43TSb\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, @@ -76,8 +76,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=1929, - serialized_end=2004, + serialized_start=2346, + serialized_end=2421, ) _sym_db.RegisterEnumDescriptor(_RESUME_RESUMETYPE) @@ -128,8 +128,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=2812, - serialized_end=2975, + serialized_start=3229, + serialized_end=3392, ) _sym_db.RegisterEnumDescriptor(_PHONE_PHONETYPE) @@ -186,8 +186,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1719, - serialized_end=1820, + serialized_start=1938, + serialized_end=2039, ) _PROFILE = _descriptor.Descriptor( @@ -341,10 +341,46 @@ serialized_options=None, file=DESCRIPTOR, ), + _descriptor.FieldDescriptor( + name="candidate_update_time", + full_name="google.cloud.talent.v4beta1.Profile.candidate_update_time", + index=8, + number=67, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="resume_update_time", + full_name="google.cloud.talent.v4beta1.Profile.resume_update_time", + index=9, + number=68, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), 
_descriptor.FieldDescriptor( name="resume", full_name="google.cloud.talent.v4beta1.Profile.resume", - index=8, + index=10, number=53, type=11, cpp_type=10, @@ -362,7 +398,7 @@ _descriptor.FieldDescriptor( name="person_names", full_name="google.cloud.talent.v4beta1.Profile.person_names", - index=9, + index=11, number=11, type=11, cpp_type=10, @@ -380,7 +416,7 @@ _descriptor.FieldDescriptor( name="addresses", full_name="google.cloud.talent.v4beta1.Profile.addresses", - index=10, + index=12, number=12, type=11, cpp_type=10, @@ -398,7 +434,7 @@ _descriptor.FieldDescriptor( name="email_addresses", full_name="google.cloud.talent.v4beta1.Profile.email_addresses", - index=11, + index=13, number=13, type=11, cpp_type=10, @@ -416,7 +452,7 @@ _descriptor.FieldDescriptor( name="phone_numbers", full_name="google.cloud.talent.v4beta1.Profile.phone_numbers", - index=12, + index=14, number=14, type=11, cpp_type=10, @@ -434,7 +470,7 @@ _descriptor.FieldDescriptor( name="personal_uris", full_name="google.cloud.talent.v4beta1.Profile.personal_uris", - index=13, + index=15, number=15, type=11, cpp_type=10, @@ -452,7 +488,7 @@ _descriptor.FieldDescriptor( name="additional_contact_info", full_name="google.cloud.talent.v4beta1.Profile.additional_contact_info", - index=14, + index=16, number=16, type=11, cpp_type=10, @@ -470,7 +506,7 @@ _descriptor.FieldDescriptor( name="employment_records", full_name="google.cloud.talent.v4beta1.Profile.employment_records", - index=15, + index=17, number=17, type=11, cpp_type=10, @@ -488,7 +524,7 @@ _descriptor.FieldDescriptor( name="education_records", full_name="google.cloud.talent.v4beta1.Profile.education_records", - index=16, + index=18, number=18, type=11, cpp_type=10, @@ -506,7 +542,7 @@ _descriptor.FieldDescriptor( name="skills", full_name="google.cloud.talent.v4beta1.Profile.skills", - index=17, + index=19, number=19, type=11, cpp_type=10, @@ -524,7 +560,7 @@ _descriptor.FieldDescriptor( name="activities", 
full_name="google.cloud.talent.v4beta1.Profile.activities", - index=18, + index=20, number=20, type=11, cpp_type=10, @@ -542,7 +578,7 @@ _descriptor.FieldDescriptor( name="publications", full_name="google.cloud.talent.v4beta1.Profile.publications", - index=19, + index=21, number=21, type=11, cpp_type=10, @@ -560,7 +596,7 @@ _descriptor.FieldDescriptor( name="patents", full_name="google.cloud.talent.v4beta1.Profile.patents", - index=20, + index=22, number=22, type=11, cpp_type=10, @@ -578,7 +614,7 @@ _descriptor.FieldDescriptor( name="certifications", full_name="google.cloud.talent.v4beta1.Profile.certifications", - index=21, + index=23, number=23, type=11, cpp_type=10, @@ -596,7 +632,7 @@ _descriptor.FieldDescriptor( name="applications", full_name="google.cloud.talent.v4beta1.Profile.applications", - index=22, + index=24, number=47, type=9, cpp_type=9, @@ -608,13 +644,13 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( name="assignments", full_name="google.cloud.talent.v4beta1.Profile.assignments", - index=23, + index=25, number=48, type=9, cpp_type=9, @@ -626,13 +662,13 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( name="custom_attributes", full_name="google.cloud.talent.v4beta1.Profile.custom_attributes", - index=24, + index=26, number=26, type=11, cpp_type=10, @@ -650,7 +686,7 @@ _descriptor.FieldDescriptor( name="processed", full_name="google.cloud.talent.v4beta1.Profile.processed", - index=25, + index=27, number=27, type=8, cpp_type=7, @@ -662,13 +698,13 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( name="keyword_snippet", 
full_name="google.cloud.talent.v4beta1.Profile.keyword_snippet", - index=26, + index=28, number=28, type=9, cpp_type=9, @@ -680,13 +716,31 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="availability_signals", + full_name="google.cloud.talent.v4beta1.Profile.availability_signals", + index=29, + number=70, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( name="derived_addresses", full_name="google.cloud.talent.v4beta1.Profile.derived_addresses", - index=27, + index=30, number=64, type=11, cpp_type=10, @@ -711,7 +765,82 @@ extension_ranges=[], oneofs=[], serialized_start=347, - serialized_end=1820, + serialized_end=2039, +) + + +_AVAILABILITYSIGNAL = _descriptor.Descriptor( + name="AvailabilitySignal", + full_name="google.cloud.talent.v4beta1.AvailabilitySignal", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="type", + full_name="google.cloud.talent.v4beta1.AvailabilitySignal.type", + index=0, + number=1, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="last_update_time", + full_name="google.cloud.talent.v4beta1.AvailabilitySignal.last_update_time", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, 
+ ), + _descriptor.FieldDescriptor( + name="filter_satisfied", + full_name="google.cloud.talent.v4beta1.AvailabilitySignal.filter_satisfied", + index=2, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2042, + serialized_end=2237, ) @@ -767,8 +896,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1823, - serialized_end=2004, + serialized_start=2240, + serialized_end=2421, ) @@ -896,8 +1025,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2161, - serialized_end=2308, + serialized_start=2578, + serialized_end=2725, ) _PERSONNAME = _descriptor.Descriptor( @@ -978,8 +1107,8 @@ fields=[], ) ], - serialized_start=2007, - serialized_end=2323, + serialized_start=2424, + serialized_end=2740, ) @@ -1079,8 +1208,8 @@ fields=[], ) ], - serialized_start=2326, - serialized_end=2543, + serialized_start=2743, + serialized_end=2960, ) @@ -1136,8 +1265,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2545, - serialized_end=2637, + serialized_start=2962, + serialized_end=3054, ) @@ -1229,8 +1358,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2640, - serialized_end=2975, + serialized_start=3057, + serialized_end=3392, ) @@ -1268,8 +1397,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2977, - serialized_end=3003, + serialized_start=3394, + serialized_end=3420, ) @@ -1343,8 +1472,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3005, - serialized_end=3124, + serialized_start=3422, + serialized_end=3541, ) @@ -1550,7 +1679,7 @@ containing_type=None, 
is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1568,7 +1697,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1586,7 +1715,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -1598,8 +1727,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3127, - serialized_end=3610, + serialized_start=3544, + serialized_end=4042, ) @@ -1787,7 +1916,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1805,7 +1934,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -1825,8 +1954,8 @@ fields=[], ) ], - serialized_start=3613, - serialized_end=4063, + serialized_start=4045, + serialized_end=4505, ) @@ -1900,8 +2029,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4065, - serialized_end=4181, + serialized_start=4507, + serialized_end=4623, ) @@ -2053,7 +2182,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2071,7 +2200,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2089,7 +2218,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -2101,8 +2230,8 @@ syntax="proto3", 
extension_ranges=[], oneofs=[], - serialized_start=4184, - serialized_end=4507, + serialized_start=4626, + serialized_end=4964, ) @@ -2284,8 +2413,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4510, - serialized_end=4713, + serialized_start=4967, + serialized_end=5170, ) @@ -2467,8 +2596,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4716, - serialized_end=5013, + serialized_start=5173, + serialized_end=5470, ) _PROFILE_CUSTOMATTRIBUTESENTRY.fields_by_name[ @@ -2486,6 +2615,12 @@ _PROFILE.fields_by_name[ "update_time" ].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_PROFILE.fields_by_name[ + "candidate_update_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_PROFILE.fields_by_name[ + "resume_update_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP _PROFILE.fields_by_name["resume"].message_type = _RESUME _PROFILE.fields_by_name["person_names"].message_type = _PERSONNAME _PROFILE.fields_by_name["addresses"].message_type = _ADDRESS @@ -2509,11 +2644,23 @@ _PROFILE.fields_by_name[ "custom_attributes" ].message_type = _PROFILE_CUSTOMATTRIBUTESENTRY +_PROFILE.fields_by_name["availability_signals"].message_type = _AVAILABILITYSIGNAL _PROFILE.fields_by_name[ "derived_addresses" ].message_type = ( google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_common__pb2._LOCATION ) +_AVAILABILITYSIGNAL.fields_by_name[ + "type" +].enum_type = ( + google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_common__pb2._AVAILABILITYSIGNALTYPE +) +_AVAILABILITYSIGNAL.fields_by_name[ + "last_update_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_AVAILABILITYSIGNAL.fields_by_name[ + "filter_satisfied" +].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE _RESUME.fields_by_name["resume_type"].enum_type = _RESUME_RESUMETYPE _RESUME_RESUMETYPE.containing_type = _RESUME _PERSONNAME_PERSONSTRUCTUREDNAME.containing_type = _PERSONNAME @@ -2639,6 
+2786,7 @@ "skills_used" ].message_type = google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_common__pb2._SKILL DESCRIPTOR.message_types_by_name["Profile"] = _PROFILE +DESCRIPTOR.message_types_by_name["AvailabilitySignal"] = _AVAILABILITYSIGNAL DESCRIPTOR.message_types_by_name["Resume"] = _RESUME DESCRIPTOR.message_types_by_name["PersonName"] = _PERSONNAME DESCRIPTOR.message_types_by_name["Address"] = _ADDRESS @@ -2678,25 +2826,23 @@ Required during profile update. Resource name assigned to a profile by the API. The format is "projects/{project\_id}/ten ants/{tenant\_id}/profiles/{profile\_id}", for example, - "projects/api-test-project/tenants/foo/profiles/bar". + "projects/foo/tenants/bar/profiles/baz". external_id: - Optional. Profile's id in client system if available. This - value is not required to be unique to each profile. However, - providing unique values makes it easier to specify individual - profiles when filing support tickets. The maximum number of - bytes allowed is 100. + Profile's id in client system, if available. This value is + unique for each profile inside a tenant. An error is thrown if + another profile with the same external\_id is created. The + maximum number of bytes allowed is 100. source: - Optional. The source description indicating where the profile - is acquired. For example, if a candidate profile is acquired + The source description indicating where the profile is + acquired. For example, if a candidate profile is acquired from a resume, the user can input "resume" here to indicate the source. The maximum number of bytes allowed is 100. uri: - Optional. The URI set by clients that links to this profile's - client-side copy. The maximum number of bytes allowed is - 4000. + The URI set by clients that links to this profile's client- + side copy. The maximum number of bytes allowed is 4000. group_id: - Optional. The cluster id of the profile to associate with - other profile(s) for the same candidate. 
This field should be + The cluster id of the profile to associate with other + profile(s) for the same candidate. This field should be generated by the customer. If a value is not provided, a random UUID is assigned to this field of the profile. This is used to link multiple profiles to the same candidate. For @@ -2708,23 +2854,50 @@ and assign it when the second profile is created, indicating these two profiles are referring to the same candidate. is_hirable: - Optional. Indicates the hirable status of the candidate. + Indicates the hirable status of the candidate. create_time: - Optional. The timestamp when the profile was first created at - this source. + The timestamp when the profile was first created at this + source. update_time: - Optional. The timestamp when the profile was last updated at - this source. + The timestamp when the profile was last updated at this + source. + candidate_update_time: + The timestamp when the profile was last updated as a result of + a direct or indirect action by a candidate. These actions + include: - Direct actions such as the candidate submitting a + new resume as part of a job application to the agency, + using a self-service tool such as a website to update their + profile, and so on. - Indirect actions by the candidate such + as uploading a resume to a job board that is collected by + the agency through a feed, providing a resume to a + recruiter who then uploads it into the ATS, and so on. - + Updates made to the candidate's profile by the recruiter as a + result of interacting with the candidate (for example + adding a skill or work preference, and so on). Changes to + [recruiting\_notes][google.cloud.talent.v4beta1.Profile.recrui + ting\_notes] are specifically excluded from this action + type. 
Note: [candidate\_update\_time][google.cloud.talent.v4b + eta1.Profile.candidate\_update\_time] must be greater than or + equal to [resume\_update\_time][google.cloud.talent.v4beta1.Pr + ofile.resume\_update\_time] or an error is thrown. + resume_update_time: + The timestamp when the candidate's resume was added or updated + on the candidate's profile. Whether that resume was directly + uploaded by a candidate, pulled from a 3rd party job board + feed, added by a recruiter, and so on. If this field is + updated, it's expected that + [resume][google.cloud.talent.v4beta1.Profile.resume] is + provided in the create or update calls. resume: - Optional. The resume representing this profile. + The resume representing this profile. person_names: - Optional. The names of the candidate this profile references. - Currently only one person name is supported. + The names of the candidate this profile references. Currently + only one person name is supported. addresses: - Optional. The candidate's postal addresses. It's highly - recommended to input this information as accurately as - possible to help improve search quality. Here are some - recommendations: - Provide + The candidate's postal addresses. It's highly recommended to + input this information as accurately as possible to help + improve search quality. Here are some recommendations: - + Provide [Address.usage][google.cloud.talent.v4beta1.Address.usage] if possible, especially if the address is PERSONAL. During a search only personal addresses are considered. If there is @@ -2755,24 +2928,24 @@ [Address.current][google.cloud.talent.v4beta1.Address.current] is false or not set. email_addresses: - Optional. The candidate's email addresses. + The candidate's email addresses. phone_numbers: - Optional. The candidate's phone number(s). + The candidate's phone number(s). personal_uris: - Optional. The candidate's personal URIs. + The candidate's personal URIs. additional_contact_info: - Optional. 
Available contact information besides + Available contact information besides [addresses][google.cloud.talent.v4beta1.Profile.addresses], [e mail\_addresses][google.cloud.talent.v4beta1.Profile.email\_ad dresses], [phone\_numbers][google.cloud.talent.v4beta1.Profile .phone\_numbers] and [personal\_uris][google.cloud.talent.v4be ta1.Profile.personal\_uris]. For example, Hang-out, Skype. employment_records: - Optional. The employment history records of the candidate. - It's highly recommended to input this information as - accurately as possible to help improve search quality. Here - are some recommendations: - Specify the start and end dates - of the employment records. - List different employment types + The employment history records of the candidate. It's highly + recommended to input this information as accurately as + possible to help improve search quality. Here are some + recommendations: - Specify the start and end dates of the + employment records. - List different employment types separately, no matter how minor the change is. For example, only job title is changed from "software engineer" to "senior software engineer". - Provide [EmploymentRecord.is @@ -2781,8 +2954,8 @@ inferred from user inputs. The limitation for max number of employment records is 100. education_records: - Optional. The education history record of the candidate. It's - highly recommended to input this information as accurately as + The education history record of the candidate. It's highly + recommended to input this information as accurately as possible to help improve search quality. Here are some recommendations: - Specify the start and end dates of the education records. - List each education type separately, no @@ -2794,23 +2967,23 @@ from user inputs. The limitation for max number of education records is 100. skills: - Optional. The skill set of the candidate. It's highly - recommended to provide as much information as possible to help - improve the search quality. 
The limitation for max number of - skills is 500. + The skill set of the candidate. It's highly recommended to + provide as much information as possible to help improve the + search quality. The limitation for max number of skills is + 500. activities: - Optional. The individual or collaborative activities which the - candidate has participated in, for example, open-source - projects, class assignments that aren't listed in [employment\ - _records][google.cloud.talent.v4beta1.Profile.employment\_reco - rds]. The limitation for max number of activities is 50. + The individual or collaborative activities which the candidate + has participated in, for example, open-source projects, class + assignments that aren't listed in [employment\_records][google + .cloud.talent.v4beta1.Profile.employment\_records]. The + limitation for max number of activities is 50. publications: - Optional. The publications published by the candidate. The - limitation for max number of publications is 50. + The publications published by the candidate. The limitation + for max number of publications is 50. patents: - Optional. The patents acquired by the candidate. + The patents acquired by the candidate. certifications: - Optional. The certifications acquired by the candidate. + The certifications acquired by the candidate. applications: Output only. The resource names of the candidate's applications. @@ -2818,9 +2991,9 @@ Output only. The resource names of the candidate's assignments. custom_attributes: - Optional. A map of fields to hold both filterable and non- - filterable custom profile attributes that aren't covered by - the provided structured fields. See + A map of fields to hold both filterable and non-filterable + custom profile attributes that aren't covered by the provided + structured fields. See [CustomAttribute][google.cloud.talent.v4beta1.CustomAttribute] for more details. At most 100 filterable and at most 100 unfilterable keys are supported. 
If limit is exceeded, an @@ -2844,6 +3017,8 @@ related to a search query. This is only returned in [SearchPro filesResponse][google.cloud.talent.v4beta1.SearchProfilesRespo nse]. + availability_signals: + Output only. Candidate's availability signals. derived_addresses: Output only. Derived locations of the profile, resolved from [ Profile.addresses][google.cloud.talent.v4beta1.Profile.address @@ -2858,6 +3033,38 @@ _sym_db.RegisterMessage(Profile) _sym_db.RegisterMessage(Profile.CustomAttributesEntry) +AvailabilitySignal = _reflection.GeneratedProtocolMessageType( + "AvailabilitySignal", + (_message.Message,), + dict( + DESCRIPTOR=_AVAILABILITYSIGNAL, + __module__="google.cloud.talent_v4beta1.proto.profile_pb2", + __doc__="""Candidate availability signal. + + + Attributes: + type: + Type of signal. + last_update_time: + Timestamp of when the given availability activity last + happened. + filter_satisfied: + Indicates if the [last\_update\_time][google.cloud.talent.v4be + ta1.AvailabilitySignal.last\_update\_time] is within [Availabi + lityFilter.range][google.cloud.talent.v4beta1.AvailabilityFilt + er.range]. Returned only in a search response when there is + an [AvailabilityFilter][google.cloud.talent.v4beta1.Availabili + tyFilter] in [ProfileQuery.availability\_filters][google.cloud + .talent.v4beta1.ProfileQuery.availability\_filters] where [sig + nal\_type][google.cloud.talent.v4beta1.AvailabilityFilter.sign + al\_type] matches + [type][google.cloud.talent.v4beta1.AvailabilitySignal.type]. + """, + # @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.AvailabilitySignal) + ), +) +_sym_db.RegisterMessage(AvailabilitySignal) + Resume = _reflection.GeneratedProtocolMessageType( "Resume", (_message.Message,), @@ -2869,9 +3076,8 @@ Attributes: structured_resume: - Optional. 
Users can create a profile with only this field - field, if [resume\_type][google.cloud.talent.v4beta1.Resume.re - sume\_type] is + Users can create a profile with only this field field, if [res + ume\_type][google.cloud.talent.v4beta1.Resume.resume\_type] is [HRXML][google.cloud.talent.v4beta1.Resume.ResumeType.HRXML]. For example, the API parses this field and creates a profile with all structured fields populated. [EmploymentRecord][googl @@ -2882,8 +3088,8 @@ from resumes is an Alpha feature and as such is not covered by any SLA. resume_type: - Optional. The format of [structured\_resume][google.cloud.tale - nt.v4beta1.Resume.structured\_resume]. + The format of [structured\_resume][google.cloud.talent.v4beta1 + .Resume.structured\_resume]. """, # @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.Resume) ), @@ -2905,24 +3111,24 @@ Attributes: given_name: - Optional. Given/first name. It's derived from [formatted\_nam - e][google.cloud.talent.v4beta1.PersonName.formatted\_name] if - not provided. Number of characters allowed is 100. + Given/first name. It's derived from [formatted\_name][google. + cloud.talent.v4beta1.PersonName.formatted\_name] if not + provided. Number of characters allowed is 100. preferred_name: - Optional. Preferred given/first name or nickname. Number of - characters allowed is 100. + Preferred given/first name or nickname. Number of characters + allowed is 100. middle_initial: - Optional. Middle initial. It's derived from [formatted\_name] - [google.cloud.talent.v4beta1.PersonName.formatted\_name] if - not provided. Number of characters allowed is 20. + Middle initial. It's derived from [formatted\_name][google.cl + oud.talent.v4beta1.PersonName.formatted\_name] if not + provided. Number of characters allowed is 20. family_name: - Optional. Family/last name. It's derived from [formatted\_nam - e][google.cloud.talent.v4beta1.PersonName.formatted\_name] if - not provided. Number of characters allowed is 100. 
+ Family/last name. It's derived from [formatted\_name][google. + cloud.talent.v4beta1.PersonName.formatted\_name] if not + provided. Number of characters allowed is 100. suffixes: - Optional. Suffixes. Number of characters allowed is 20. + Suffixes. Number of characters allowed is 20. prefixes: - Optional. Prefixes. Number of characters allowed is 20. + Prefixes. Number of characters allowed is 20. """, # @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.PersonName.PersonStructuredName) ), @@ -2939,17 +3145,15 @@ red\_name][google.cloud.talent.v4beta1.PersonName.structured\_ name]. formatted_name: - Optional. A string represents a person's full name. For - example, "Dr. John Smith". Number of characters allowed is - 100. + A string represents a person's full name. For example, "Dr. + John Smith". Number of characters allowed is 100. structured_name: - Optional. A person's name in a structured way (last name, - first name, suffix, and so on.) + A person's name in a structured way (last name, first name, + suffix, and so on.) preferred_name: - Optional. Preferred name for the person. This field is ignored - if [structured\_name][google.cloud.talent.v4beta1.PersonName.s - tructured\_name] is provided. Number of characters allowed is - 100. + Preferred name for the person. This field is ignored if [struc + tured\_name][google.cloud.talent.v4beta1.PersonName.structured + \_name] is provided. Number of characters allowed is 100. """, # @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.PersonName) ), @@ -2968,22 +3172,21 @@ Attributes: usage: - Optional. The usage of the address. For example, SCHOOL, WORK, - PERSONAL. + The usage of the address. For example, SCHOOL, WORK, PERSONAL. address: The address of a person. It can be one of [unstructured\_addre ss][google.cloud.talent.v4beta1.Address.unstructured\_address] or [structured\_address][google.cloud.talent.v4beta1.Address.s tructured\_address]. unstructured_address: - Optional. 
Unstructured address. For example, "1600 - Amphitheatre Pkwy, Mountain View, CA 94043", "Sunnyvale, - California". Number of characters allowed is 100. + Unstructured address. For example, "1600 Amphitheatre Pkwy, + Mountain View, CA 94043", "Sunnyvale, California". Number of + characters allowed is 100. structured_address: - Optional. Structured address that contains street address, - city, state, country, and so on. + Structured address that contains street address, city, state, + country, and so on. current: - Optional. Indicates if it's the person's current address. + Indicates if it's the person's current address. """, # @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.Address) ), @@ -3001,11 +3204,10 @@ Attributes: usage: - Optional. The usage of the email address. For example, SCHOOL, - WORK, PERSONAL. + The usage of the email address. For example, SCHOOL, WORK, + PERSONAL. email_address: - Optional. Email address. Number of characters allowed is - 4,000. + Email address. Number of characters allowed is 4,000. """, # @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.Email) ), @@ -3023,19 +3225,18 @@ Attributes: usage: - Optional. The usage of the phone. For example, SCHOOL, WORK, - PERSONAL. + The usage of the phone. For example, SCHOOL, WORK, PERSONAL. type: - Optional. The phone type. For example, LANDLINE, MOBILE, FAX. + The phone type. For example, LANDLINE, MOBILE, FAX. number: - Optional. Phone number. Any phone formats are supported and - only exact matches are performed on searches. For example, if - a phone number in profile is provided in the format of - "(xxx)xxx-xxxx", in profile searches the same phone format has - to be provided. Number of characters allowed is 20. + Phone number. Any phone formats are supported and only exact + matches are performed on searches. 
For example, if a phone + number in profile is provided in the format of "(xxx)xxx- + xxxx", in profile searches the same phone format has to be + provided. Number of characters allowed is 20. when_available: - Optional. When this number is available. Any descriptive - string is expected. Number of characters allowed is 100. + When this number is available. Any descriptive string is + expected. Number of characters allowed is 100. """, # @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.Phone) ), @@ -3053,8 +3254,7 @@ Attributes: uri: - Optional. The personal URI. Number of characters allowed is - 4,000. + The personal URI. Number of characters allowed is 4,000. """, # @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.PersonalUri) ), @@ -3073,14 +3273,13 @@ Attributes: usage: - Optional. The usage of this contact method. For example, - SCHOOL, WORK, PERSONAL. + The usage of this contact method. For example, SCHOOL, WORK, + PERSONAL. name: - Optional. The name of the contact method. For example, - "hangout", "skype". Number of characters allowed is 100. + The name of the contact method. For example, "hangout", + "skype". Number of characters allowed is 100. contact_id: - Optional. The contact id. Number of characters allowed is - 100. + The contact id. Number of characters allowed is 100. """, # @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.AdditionalContactInfo) ), @@ -3098,32 +3297,32 @@ Attributes: start_date: - Optional. Start date of the employment. + Start date of the employment. end_date: - Optional. End date of the employment. + End date of the employment. employer_name: - Optional. The name of the employer company/organization. For - example, "Google", "Alphabet", and so on. Number of - characters allowed is 250. + The name of the employer company/organization. For example, + "Google", "Alphabet", and so on. Number of characters allowed + is 250. division_name: - Optional. The division name of the employment. 
For example, - division, department, client, and so on. Number of characters - allowed is 100. + The division name of the employment. For example, division, + department, client, and so on. Number of characters allowed + is 100. address: - Optional. The physical address of the employer. + The physical address of the employer. job_title: - Optional. The job title of the employment. For example, - "Software Engineer", "Data Scientist", and so on. Number of - characters allowed is 250. + The job title of the employment. For example, "Software + Engineer", "Data Scientist", and so on. Number of characters + allowed is 250. job_description: - Optional. The description of job content. Number of - characters allowed is 100,000. + The description of job content. Number of characters allowed + is 100,000. is_supervisor: - Optional. If the jobs is a supervisor position. + If the jobs is a supervisor position. is_self_employed: - Optional. If this employment is self-employed. + If this employment is self-employed. is_current: - Optional. If this employment is current. + If this employment is current. job_title_snippet: Output only. The job title snippet shows how the [job\_title][ google.cloud.talent.v4beta1.EmploymentRecord.job\_title] is @@ -3159,34 +3358,33 @@ Attributes: start_date: - Optional. The start date of the education. + The start date of the education. end_date: - Optional. The end date of the education. + The end date of the education. expected_graduation_date: - Optional. The expected graduation date if currently pursuing a - degree. + The expected graduation date if currently pursuing a degree. school_name: - Optional. The name of the school or institution. For example, - "Stanford University", "UC Berkeley", and so on. Number of - characters allowed is 250. + The name of the school or institution. For example, "Stanford + University", "UC Berkeley", and so on. Number of characters + allowed is 250. address: - Optional. 
The physical address of the education institution. + The physical address of the education institution. degree: The degree information. It can be one of [degree\_description] [google.cloud.talent.v4beta1.EducationRecord.degree\_descripti on] or [structured\_degree][google.cloud.talent.v4beta1.Educat ionRecord.structured\_degree]. degree_description: - Optional. The full description of the degree. For example, - "Master of Science in Computer Science", "B.S in Math". - Number of characters allowed is 100. + The full description of the degree. For example, "Master of + Science in Computer Science", "B.S in Math". Number of + characters allowed is 100. structured_degree: - Optional. The structured notation of the degree. + The structured notation of the degree. description: - Optional. The description of the education. Number of - characters allowed is 100,000. + The description of the education. Number of characters + allowed is 100,000. is_current: - Optional. If this education is current. + If this education is current. school_name_snippet: Output only. The school name snippet shows how the [school\_na me][google.cloud.talent.v4beta1.EducationRecord.school\_name] @@ -3216,14 +3414,13 @@ Attributes: degree_type: - Optional. ISCED degree type. + ISCED degree type. degree_name: - Optional. Full Degree name. For example, "B.S.", "Master of - Arts", and so on. Number of characters allowed is 100. + Full Degree name. For example, "B.S.", "Master of Arts", and + so on. Number of characters allowed is 100. fields_of_study: - Optional. Fields of study for the degree. For example, - "Computer science", "engineering". Number of characters - allowed is 100. + Fields of study for the degree. For example, "Computer + science", "engineering". Number of characters allowed is 100. """, # @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.Degree) ), @@ -3243,25 +3440,23 @@ class assignment, and so on. Attributes: display_name: - Optional. Activity display name. 
Number of characters allowed - is 100. + Activity display name. Number of characters allowed is 100. description: - Optional. Activity description. Number of characters allowed - is 100,000. + Activity description. Number of characters allowed is + 100,000. uri: - Optional. Activity URI. Number of characters allowed is - 4,000. + Activity URI. Number of characters allowed is 4,000. create_date: - Optional. The first creation date of the activity. + The first creation date of the activity. update_date: - Optional. The last update date of the activity. + The last update date of the activity. team_members: - Optional. A list of team members involved in this activity. - Number of characters allowed is 100. The limitation for max - number of team members is 50. + A list of team members involved in this activity. Number of + characters allowed is 100. The limitation for max number of + team members is 50. skills_used: - Optional. A list of skills used in this activity. The - limitation for max number of skills used is 50. + A list of skills used in this activity. The limitation for + max number of skills used is 50. activity_name_snippet: Output only. Activity name snippet shows how the [display\_nam e][google.cloud.talent.v4beta1.Activity.display\_name] is @@ -3297,29 +3492,27 @@ class assignment, and so on. Attributes: authors: - Optional. A list of author names. Number of characters - allowed is 100. + A list of author names. Number of characters allowed is 100. title: - Optional. The title of the publication. Number of characters - allowed is 100. + The title of the publication. Number of characters allowed is + 100. description: - Optional. The description of the publication. Number of - characters allowed is 100,000. + The description of the publication. Number of characters + allowed is 100,000. journal: - Optional. The journal name of the publication. Number of - characters allowed is 100. + The journal name of the publication. 
Number of characters + allowed is 100. volume: - Optional. Volume number. Number of characters allowed is 100. + Volume number. Number of characters allowed is 100. publisher: - Optional. The publisher of the journal. Number of characters - allowed is 100. + The publisher of the journal. Number of characters allowed is + 100. publication_date: - Optional. The publication date. + The publication date. publication_type: - Optional. The publication type. Number of characters allowed - is 100. + The publication type. Number of characters allowed is 100. isbn: - Optional. ISBN number. Number of characters allowed is 100. + ISBN number. Number of characters allowed is 100. """, # @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.Publication) ), @@ -3337,30 +3530,28 @@ class assignment, and so on. Attributes: display_name: - Optional. Name of the patent. Number of characters allowed is - 100. + Name of the patent. Number of characters allowed is 100. inventors: - Optional. A list of inventors' names. Number of characters - allowed for each is 100. + A list of inventors' names. Number of characters allowed for + each is 100. patent_status: - Optional. The status of the patent. Number of characters - allowed is 100. + The status of the patent. Number of characters allowed is + 100. patent_status_date: - Optional. The date the last time the status of the patent was - checked. + The date the last time the status of the patent was checked. patent_filing_date: - Optional. The date that the patent was filed. + The date that the patent was filed. patent_office: - Optional. The name of the patent office. Number of characters - allowed is 100. + The name of the patent office. Number of characters allowed + is 100. patent_number: - Optional. The number of the patent. Number of characters - allowed is 100. + The number of the patent. Number of characters allowed is + 100. patent_description: - Optional. The description of the patent. 
Number of characters - allowed is 100,000. + The description of the patent. Number of characters allowed + is 100,000. skills_used: - Optional. The skills used in this patent. + The skills used in this patent. """, # @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.Patent) ), @@ -3370,5 +3561,18 @@ class assignment, and so on. DESCRIPTOR._options = None _PROFILE_CUSTOMATTRIBUTESENTRY._options = None +_PROFILE.fields_by_name["applications"]._options = None +_PROFILE.fields_by_name["assignments"]._options = None +_PROFILE.fields_by_name["processed"]._options = None +_PROFILE.fields_by_name["keyword_snippet"]._options = None +_PROFILE.fields_by_name["availability_signals"]._options = None _PROFILE.fields_by_name["derived_addresses"]._options = None +_EMPLOYMENTRECORD.fields_by_name["job_title_snippet"]._options = None +_EMPLOYMENTRECORD.fields_by_name["job_description_snippet"]._options = None +_EMPLOYMENTRECORD.fields_by_name["employer_name_snippet"]._options = None +_EDUCATIONRECORD.fields_by_name["school_name_snippet"]._options = None +_EDUCATIONRECORD.fields_by_name["degree_snippet"]._options = None +_ACTIVITY.fields_by_name["activity_name_snippet"]._options = None +_ACTIVITY.fields_by_name["activity_description_snippet"]._options = None +_ACTIVITY.fields_by_name["skills_used_snippet"]._options = None # @@protoc_insertion_point(module_scope) diff --git a/talent/google/cloud/talent_v4beta1/proto/profile_service.proto b/talent/google/cloud/talent_v4beta1/proto/profile_service.proto index 97cb9588f038..2e3ba898db83 100644 --- a/talent/google/cloud/talent_v4beta1/proto/profile_service.proto +++ b/talent/google/cloud/talent_v4beta1/proto/profile_service.proto @@ -19,10 +19,12 @@ package google.cloud.talent.v4beta1; import "google/api/annotations.proto"; import "google/api/client.proto"; +import "google/api/field_behavior.proto"; import "google/cloud/talent/v4beta1/common.proto"; import "google/cloud/talent/v4beta1/filters.proto"; import 
"google/cloud/talent/v4beta1/histogram.proto"; import "google/cloud/talent/v4beta1/profile.proto"; +import "google/protobuf/any.proto"; import "google/protobuf/empty.proto"; import "google/protobuf/field_mask.proto"; @@ -100,25 +102,43 @@ message ListProfilesRequest { // Required. The resource name of the tenant under which the profile is // created. // - // The format is "projects/{project_id}/tenants/{tenant_id}", for example, - // "projects/api-test-project/tenants/foo". - string parent = 1; + // The format is "projects/{project_id}/tenants/{tenant_id}". For example, + // "projects/foo/tenants/bar". + string parent = 1 [(google.api.field_behavior) = REQUIRED]; - // Optional. The token that specifies the current offset (that is, starting - // result). + // The filter string specifies the profiles to be enumerated. + // + // Supported operator: =, AND + // + // The field(s) eligible for filtering are: + // + // * `externalId` + // * `groupId` + // + // externalId and groupId cannot be specified at the same time. If both + // externalId and groupId are provided, the API will return a bad request + // error. + // + // Sample Query: + // + // * externalId = "externalId-1" + // * groupId = "groupId-1" + string filter = 5; + + // The token that specifies the current offset (that is, starting result). // // Please set the value to // [ListProfilesResponse.next_page_token][google.cloud.talent.v4beta1.ListProfilesResponse.next_page_token] // to continue the list. string page_token = 2; - // Optional. The maximum number of profiles to be returned, at most 100. + // The maximum number of profiles to be returned, at most 100. // // Default is 100 unless a positive number smaller than 100 is specified. int32 page_size = 3; - // Optional. A field mask to specify the profile fields to be listed in - // response. All fields are listed if it is unset. + // A field mask to specify the profile fields to be listed in response. + // All fields are listed if it is unset. 
// // Valid values are: // @@ -140,12 +160,12 @@ message ListProfilesResponse { message CreateProfileRequest { // Required. The name of the tenant this profile belongs to. // - // The format is "projects/{project_id}/tenants/{tenant_id}", for example, - // "projects/api-test-project/tenants/foo". - string parent = 1; + // The format is "projects/{project_id}/tenants/{tenant_id}". For example, + // "projects/foo/tenants/bar". + string parent = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The profile to be created. - Profile profile = 2; + Profile profile = 2 [(google.api.field_behavior) = REQUIRED]; } // Get profile request. @@ -153,17 +173,17 @@ message GetProfileRequest { // Required. Resource name of the profile to get. // // The format is - // "projects/{project_id}/tenants/{tenant_id}/profiles/{profile_id}", - // for example, "projects/api-test-project/tenants/foo/profiles/bar". - string name = 1; + // "projects/{project_id}/tenants/{tenant_id}/profiles/{profile_id}". For + // example, "projects/foo/tenants/bar/profiles/baz". + string name = 1 [(google.api.field_behavior) = REQUIRED]; } // Update profile request message UpdateProfileRequest { // Required. Profile to be updated. - Profile profile = 1; + Profile profile = 1 [(google.api.field_behavior) = REQUIRED]; - // Optional. A field mask to specify the profile fields to update. + // A field mask to specify the profile fields to update. // // A full update is performed if it is unset. // @@ -171,10 +191,13 @@ message UpdateProfileRequest { // // * external_id // * source + // * source_types // * uri // * is_hirable // * create_time // * update_time + // * candidate_update_time + // * resume_update_time // * resume // * person_names // * addresses @@ -220,45 +243,45 @@ message DeleteProfileRequest { // Required. Resource name of the profile to be deleted. 
// // The format is - // "projects/{project_id}/tenants/{tenant_id}/profiles/{profile_id}", - // for example, "projects/api-test-project/tenants/foo/profiles/bar". - string name = 1; + // "projects/{project_id}/tenants/{tenant_id}/profiles/{profile_id}". For + // example, "projects/foo/tenants/bar/profiles/baz". + string name = 1 [(google.api.field_behavior) = REQUIRED]; } // The request body of the `SearchProfiles` call. message SearchProfilesRequest { // Required. The resource name of the tenant to search within. // - // The format is "projects/{project_id}/tenants/{tenant_id}", for example, - // "projects/api-test-project/tenants/foo". - string parent = 1; + // The format is "projects/{project_id}/tenants/{tenant_id}". For example, + // "projects/foo/tenants/bar". + string parent = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The meta information collected about the profile search user. // This is used to improve the search quality of the service. These values are // provided by users, and must be precise and consistent. - RequestMetadata request_metadata = 2; + RequestMetadata request_metadata = 2 [(google.api.field_behavior) = REQUIRED]; - // Optional. Search query to execute. See + // Search query to execute. See // [ProfileQuery][google.cloud.talent.v4beta1.ProfileQuery] for more details. ProfileQuery profile_query = 3; - // Optional. A limit on the number of profiles returned in the search results. + // A limit on the number of profiles returned in the search results. // A value above the default value 10 can increase search response time. // // The maximum value allowed is 100. Otherwise an error is thrown. int32 page_size = 4; - // Optional. The pageToken, similar to offset enables users of the API to - // paginate through the search results. To retrieve the first page of results, - // set the pageToken to empty. 
The search response includes a + // The pageToken, similar to offset enables users of the API to paginate + // through the search results. To retrieve the first page of results, set the + // pageToken to empty. The search response includes a // [nextPageToken][google.cloud.talent.v4beta1.SearchProfilesResponse.next_page_token] // field that can be used to populate the pageToken field for the next page of // results. Using pageToken instead of offset increases the performance of the // API, especially compared to larger offset values. string page_token = 5; - // Optional. An integer that specifies the current offset (that is, starting - // result) in search results. This field is only considered if + // An integer that specifies the current offset (that is, starting result) in + // search results. This field is only considered if // [page_token][google.cloud.talent.v4beta1.SearchProfilesRequest.page_token] // is unset. // @@ -269,13 +292,13 @@ message SearchProfilesRequest { // pageSize = 10 and offset = 10 means to search from the second page. int32 offset = 6; - // Optional. This flag controls the spell-check feature. If `false`, the + // This flag controls the spell-check feature. If `false`, the // service attempts to correct a misspelled query. // // For example, "enginee" is corrected to "engineer". bool disable_spell_check = 7; - // Optional. The criteria that determines how search results are sorted. + // The criteria that determines how search results are sorted. // Defaults is "relevance desc" if no value is specified. // // Supported options are: @@ -306,17 +329,16 @@ message SearchProfilesRequest { // in ascending order. string order_by = 8; - // Optional. When sort by field is based on alphabetical order, sort values - // case sensitively (based on ASCII) when the value is set to true. Default - // value is case in-sensitive sort (false). 
+ // When sort by field is based on alphabetical order, sort values case + // sensitively (based on ASCII) when the value is set to true. Default value + // is case in-sensitive sort (false). bool case_sensitive_sort = 9; - // Optional. A list of expressions specifies histogram requests against - // matching profiles for + // A list of expressions specifies histogram requests against matching + // profiles for // [SearchProfilesRequest][google.cloud.talent.v4beta1.SearchProfilesRequest]. // - // The expression syntax looks like a function definition with optional - // parameters. + // The expression syntax looks like a function definition with parameters. // // Function syntax: function_name(histogram_facet[, list of buckets]) // @@ -395,7 +417,7 @@ message SearchProfilesRequest { // [bucket(MIN, 0, "negative"), bucket(0, MAX, "non-negative")]) repeated HistogramQuery histogram_queries = 10; - // Optional. An id that uniquely identifies the result set of a + // An id that uniquely identifies the result set of a // [SearchProfiles][google.cloud.talent.v4beta1.ProfileService.SearchProfiles] // call. The id should be retrieved from the // [SearchProfilesResponse][google.cloud.talent.v4beta1.SearchProfilesResponse] @@ -427,7 +449,7 @@ message SearchProfilesRequest { // to page through the results. string result_set_id = 12; - // Optional. This flag is used to indicate whether the service will attempt to + // This flag is used to indicate whether the service will attempt to // understand synonyms and terms related to the search query or treat the // query "as is" when it generates a set of results. By default this flag is // set to false, thus allowing expanded results to also be returned. For @@ -475,8 +497,6 @@ message SearchProfilesResponse { string result_set_id = 7; } -// Output only. -// // Profile entry with metadata inside // [SearchProfilesResponse][google.cloud.talent.v4beta1.SearchProfilesResponse]. 
message SummarizedProfile { diff --git a/talent/google/cloud/talent_v4beta1/proto/profile_service_pb2.py b/talent/google/cloud/talent_v4beta1/proto/profile_service_pb2.py index 3d58e75fc6db..b23d58043f1f 100644 --- a/talent/google/cloud/talent_v4beta1/proto/profile_service_pb2.py +++ b/talent/google/cloud/talent_v4beta1/proto/profile_service_pb2.py @@ -17,6 +17,7 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.cloud.talent_v4beta1.proto import ( common_pb2 as google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_common__pb2, ) @@ -29,6 +30,7 @@ from google.cloud.talent_v4beta1.proto import ( profile_pb2 as google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_profile__pb2, ) +from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 @@ -41,15 +43,17 @@ "\n\037com.google.cloud.talent.v4beta1B\023ProfileServiceProtoP\001ZAgoogle.golang.org/genproto/googleapis/cloud/talent/v4beta1;talent\242\002\003CTS" ), serialized_pb=_b( - '\n7google/cloud/talent_v4beta1/proto/profile_service.proto\x12\x1bgoogle.cloud.talent.v4beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a.google/cloud/talent_v4beta1/proto/common.proto\x1a/google/cloud/talent_v4beta1/proto/filters.proto\x1a\x31google/cloud/talent_v4beta1/proto/histogram.proto\x1a/google/cloud/talent_v4beta1/proto/profile.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto"{\n\x13ListProfilesRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12-\n\tread_mask\x18\x04 
\x01(\x0b\x32\x1a.google.protobuf.FieldMask"g\n\x14ListProfilesResponse\x12\x36\n\x08profiles\x18\x01 \x03(\x0b\x32$.google.cloud.talent.v4beta1.Profile\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"]\n\x14\x43reateProfileRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x35\n\x07profile\x18\x02 \x01(\x0b\x32$.google.cloud.talent.v4beta1.Profile"!\n\x11GetProfileRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"~\n\x14UpdateProfileRequest\x12\x35\n\x07profile\x18\x01 \x01(\x0b\x32$.google.cloud.talent.v4beta1.Profile\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"$\n\x14\x44\x65leteProfileRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"\xb3\x03\n\x15SearchProfilesRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x46\n\x10request_metadata\x18\x02 \x01(\x0b\x32,.google.cloud.talent.v4beta1.RequestMetadata\x12@\n\rprofile_query\x18\x03 \x01(\x0b\x32).google.cloud.talent.v4beta1.ProfileQuery\x12\x11\n\tpage_size\x18\x04 \x01(\x05\x12\x12\n\npage_token\x18\x05 \x01(\t\x12\x0e\n\x06offset\x18\x06 \x01(\x05\x12\x1b\n\x13\x64isable_spell_check\x18\x07 \x01(\x08\x12\x10\n\x08order_by\x18\x08 \x01(\t\x12\x1b\n\x13\x63\x61se_sensitive_sort\x18\t \x01(\x08\x12\x46\n\x11histogram_queries\x18\n \x03(\x0b\x32+.google.cloud.talent.v4beta1.HistogramQuery\x12\x15\n\rresult_set_id\x18\x0c \x01(\t\x12\x1e\n\x16strict_keywords_search\x18\r \x01(\x08"\x93\x03\n\x16SearchProfilesResponse\x12\x1c\n\x14\x65stimated_total_size\x18\x01 \x01(\x03\x12I\n\x10spell_correction\x18\x02 \x01(\x0b\x32/.google.cloud.talent.v4beta1.SpellingCorrection\x12?\n\x08metadata\x18\x03 \x01(\x0b\x32-.google.cloud.talent.v4beta1.ResponseMetadata\x12\x17\n\x0fnext_page_token\x18\x04 \x01(\t\x12R\n\x17histogram_query_results\x18\x05 \x03(\x0b\x32\x31.google.cloud.talent.v4beta1.HistogramQueryResult\x12K\n\x13summarized_profiles\x18\x06 \x03(\x0b\x32..google.cloud.talent.v4beta1.SummarizedProfile\x12\x15\n\rresult_set_id\x18\x07 \x01(\t"\x82\x01\n\x11SummarizedProfile\x12\x36\n\x08profiles\x18\x01 
\x03(\x0b\x32$.google.cloud.talent.v4beta1.Profile\x12\x35\n\x07summary\x18\x02 \x01(\x0b\x32$.google.cloud.talent.v4beta1.Profile2\xed\x08\n\x0eProfileService\x12\xac\x01\n\x0cListProfiles\x12\x30.google.cloud.talent.v4beta1.ListProfilesRequest\x1a\x31.google.cloud.talent.v4beta1.ListProfilesResponse"7\x82\xd3\xe4\x93\x02\x31\x12//v4beta1/{parent=projects/*/tenants/*}/profiles\x12\xa4\x01\n\rCreateProfile\x12\x31.google.cloud.talent.v4beta1.CreateProfileRequest\x1a$.google.cloud.talent.v4beta1.Profile":\x82\xd3\xe4\x93\x02\x34"//v4beta1/{parent=projects/*/tenants/*}/profiles:\x01*\x12\x9b\x01\n\nGetProfile\x12..google.cloud.talent.v4beta1.GetProfileRequest\x1a$.google.cloud.talent.v4beta1.Profile"7\x82\xd3\xe4\x93\x02\x31\x12//v4beta1/{name=projects/*/tenants/*/profiles/*}\x12\xac\x01\n\rUpdateProfile\x12\x31.google.cloud.talent.v4beta1.UpdateProfileRequest\x1a$.google.cloud.talent.v4beta1.Profile"B\x82\xd3\xe4\x93\x02<27/v4beta1/{profile.name=projects/*/tenants/*/profiles/*}:\x01*\x12\x93\x01\n\rDeleteProfile\x12\x31.google.cloud.talent.v4beta1.DeleteProfileRequest\x1a\x16.google.protobuf.Empty"7\x82\xd3\xe4\x93\x02\x31*//v4beta1/{name=projects/*/tenants/*/profiles/*}\x12\xb3\x01\n\x0eSearchProfiles\x12\x32.google.cloud.talent.v4beta1.SearchProfilesRequest\x1a\x33.google.cloud.talent.v4beta1.SearchProfilesResponse"8\x82\xd3\xe4\x93\x02\x32"-/v4beta1/{parent=projects/*/tenants/*}:search:\x01*\x1al\xca\x41\x13jobs.googleapis.com\xd2\x41Shttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/jobsB\x81\x01\n\x1f\x63om.google.cloud.talent.v4beta1B\x13ProfileServiceProtoP\x01ZAgoogle.golang.org/genproto/googleapis/cloud/talent/v4beta1;talent\xa2\x02\x03\x43TSb\x06proto3' + 
'\n7google/cloud/talent_v4beta1/proto/profile_service.proto\x12\x1bgoogle.cloud.talent.v4beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a.google/cloud/talent_v4beta1/proto/common.proto\x1a/google/cloud/talent_v4beta1/proto/filters.proto\x1a\x31google/cloud/talent_v4beta1/proto/histogram.proto\x1a/google/cloud/talent_v4beta1/proto/profile.proto\x1a\x19google/protobuf/any.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto"\x90\x01\n\x13ListProfilesRequest\x12\x13\n\x06parent\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x0e\n\x06\x66ilter\x18\x05 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12-\n\tread_mask\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"g\n\x14ListProfilesResponse\x12\x36\n\x08profiles\x18\x01 \x03(\x0b\x32$.google.cloud.talent.v4beta1.Profile\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"g\n\x14\x43reateProfileRequest\x12\x13\n\x06parent\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12:\n\x07profile\x18\x02 \x01(\x0b\x32$.google.cloud.talent.v4beta1.ProfileB\x03\xe0\x41\x02"&\n\x11GetProfileRequest\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02"\x83\x01\n\x14UpdateProfileRequest\x12:\n\x07profile\x18\x01 \x01(\x0b\x32$.google.cloud.talent.v4beta1.ProfileB\x03\xe0\x41\x02\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask")\n\x14\x44\x65leteProfileRequest\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02"\xbd\x03\n\x15SearchProfilesRequest\x12\x13\n\x06parent\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12K\n\x10request_metadata\x18\x02 \x01(\x0b\x32,.google.cloud.talent.v4beta1.RequestMetadataB\x03\xe0\x41\x02\x12@\n\rprofile_query\x18\x03 \x01(\x0b\x32).google.cloud.talent.v4beta1.ProfileQuery\x12\x11\n\tpage_size\x18\x04 \x01(\x05\x12\x12\n\npage_token\x18\x05 \x01(\t\x12\x0e\n\x06offset\x18\x06 \x01(\x05\x12\x1b\n\x13\x64isable_spell_check\x18\x07 \x01(\x08\x12\x10\n\x08order_by\x18\x08 
\x01(\t\x12\x1b\n\x13\x63\x61se_sensitive_sort\x18\t \x01(\x08\x12\x46\n\x11histogram_queries\x18\n \x03(\x0b\x32+.google.cloud.talent.v4beta1.HistogramQuery\x12\x15\n\rresult_set_id\x18\x0c \x01(\t\x12\x1e\n\x16strict_keywords_search\x18\r \x01(\x08"\x93\x03\n\x16SearchProfilesResponse\x12\x1c\n\x14\x65stimated_total_size\x18\x01 \x01(\x03\x12I\n\x10spell_correction\x18\x02 \x01(\x0b\x32/.google.cloud.talent.v4beta1.SpellingCorrection\x12?\n\x08metadata\x18\x03 \x01(\x0b\x32-.google.cloud.talent.v4beta1.ResponseMetadata\x12\x17\n\x0fnext_page_token\x18\x04 \x01(\t\x12R\n\x17histogram_query_results\x18\x05 \x03(\x0b\x32\x31.google.cloud.talent.v4beta1.HistogramQueryResult\x12K\n\x13summarized_profiles\x18\x06 \x03(\x0b\x32..google.cloud.talent.v4beta1.SummarizedProfile\x12\x15\n\rresult_set_id\x18\x07 \x01(\t"\x82\x01\n\x11SummarizedProfile\x12\x36\n\x08profiles\x18\x01 \x03(\x0b\x32$.google.cloud.talent.v4beta1.Profile\x12\x35\n\x07summary\x18\x02 \x01(\x0b\x32$.google.cloud.talent.v4beta1.Profile2\xed\x08\n\x0eProfileService\x12\xac\x01\n\x0cListProfiles\x12\x30.google.cloud.talent.v4beta1.ListProfilesRequest\x1a\x31.google.cloud.talent.v4beta1.ListProfilesResponse"7\x82\xd3\xe4\x93\x02\x31\x12//v4beta1/{parent=projects/*/tenants/*}/profiles\x12\xa4\x01\n\rCreateProfile\x12\x31.google.cloud.talent.v4beta1.CreateProfileRequest\x1a$.google.cloud.talent.v4beta1.Profile":\x82\xd3\xe4\x93\x02\x34"//v4beta1/{parent=projects/*/tenants/*}/profiles:\x01*\x12\x9b\x01\n\nGetProfile\x12..google.cloud.talent.v4beta1.GetProfileRequest\x1a$.google.cloud.talent.v4beta1.Profile"7\x82\xd3\xe4\x93\x02\x31\x12//v4beta1/{name=projects/*/tenants/*/profiles/*}\x12\xac\x01\n\rUpdateProfile\x12\x31.google.cloud.talent.v4beta1.UpdateProfileRequest\x1a$.google.cloud.talent.v4beta1.Profile"B\x82\xd3\xe4\x93\x02<27/v4beta1/{profile.name=projects/*/tenants/*/profiles/*}:\x01*\x12\x93\x01\n\rDeleteProfile\x12\x31.google.cloud.talent.v4beta1.DeleteProfileRequest\x1a\x16.google.protobuf.Empty"7\x
82\xd3\xe4\x93\x02\x31*//v4beta1/{name=projects/*/tenants/*/profiles/*}\x12\xb3\x01\n\x0eSearchProfiles\x12\x32.google.cloud.talent.v4beta1.SearchProfilesRequest\x1a\x33.google.cloud.talent.v4beta1.SearchProfilesResponse"8\x82\xd3\xe4\x93\x02\x32"-/v4beta1/{parent=projects/*/tenants/*}:search:\x01*\x1al\xca\x41\x13jobs.googleapis.com\xd2\x41Shttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/jobsB\x81\x01\n\x1f\x63om.google.cloud.talent.v4beta1B\x13ProfileServiceProtoP\x01ZAgoogle.golang.org/genproto/googleapis/cloud/talent/v4beta1;talent\xa2\x02\x03\x43TSb\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_common__pb2.DESCRIPTOR, google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_filters__pb2.DESCRIPTOR, google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_histogram__pb2.DESCRIPTOR, google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_profile__pb2.DESCRIPTOR, + google_dot_protobuf_dot_any__pb2.DESCRIPTOR, google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, ], @@ -78,13 +82,31 @@ containing_type=None, is_extension=False, extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="filter", + full_name="google.cloud.talent.v4beta1.ListProfilesRequest.filter", + index=1, + number=5, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( name="page_token", full_name="google.cloud.talent.v4beta1.ListProfilesRequest.page_token", - index=1, + index=2, number=2, type=9, cpp_type=9, @@ -102,7 +124,7 @@ _descriptor.FieldDescriptor( 
name="page_size", full_name="google.cloud.talent.v4beta1.ListProfilesRequest.page_size", - index=2, + index=3, number=3, type=5, cpp_type=1, @@ -120,7 +142,7 @@ _descriptor.FieldDescriptor( name="read_mask", full_name="google.cloud.talent.v4beta1.ListProfilesRequest.read_mask", - index=3, + index=4, number=4, type=11, cpp_type=10, @@ -144,8 +166,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=403, - serialized_end=526, + serialized_start=464, + serialized_end=608, ) @@ -201,8 +223,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=528, - serialized_end=631, + serialized_start=610, + serialized_end=713, ) @@ -228,7 +250,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -246,7 +268,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -258,8 +280,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=633, - serialized_end=726, + serialized_start=715, + serialized_end=818, ) @@ -285,7 +307,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ) ], @@ -297,8 +319,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=728, - serialized_end=761, + serialized_start=820, + serialized_end=858, ) @@ -324,7 +346,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -354,8 +376,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=763, - serialized_end=889, + serialized_start=861, + serialized_end=992, ) @@ -381,7 +403,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + 
serialized_options=_b("\340A\002"), file=DESCRIPTOR, ) ], @@ -393,8 +415,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=891, - serialized_end=927, + serialized_start=994, + serialized_end=1035, ) @@ -420,7 +442,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -438,7 +460,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -630,8 +652,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=930, - serialized_end=1365, + serialized_start=1038, + serialized_end=1483, ) @@ -777,8 +799,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1368, - serialized_end=1771, + serialized_start=1486, + serialized_end=1889, ) @@ -834,8 +856,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1774, - serialized_end=1904, + serialized_start=1892, + serialized_end=2022, ) _LISTPROFILESREQUEST.fields_by_name[ @@ -926,21 +948,29 @@ parent: Required. The resource name of the tenant under which the profile is created. The format is - "projects/{project\_id}/tenants/{tenant\_id}", for example, - "projects/api-test-project/tenants/foo". + "projects/{project\_id}/tenants/{tenant\_id}". For example, + "projects/foo/tenants/bar". + filter: + The filter string specifies the profiles to be enumerated. + Supported operator: =, AND The field(s) eligible for + filtering are: - ``externalId`` - ``groupId`` externalId + and groupId cannot be specified at the same time. If both + externalId and groupId are provided, the API will return a bad + request error. Sample Query: - externalId = "externalId-1" + - groupId = "groupId-1" page_token: - Optional. The token that specifies the current offset (that - is, starting result). 
Please set the value to [ListProfilesRe - sponse.next\_page\_token][google.cloud.talent.v4beta1.ListProf - ilesResponse.next\_page\_token] to continue the list. + The token that specifies the current offset (that is, starting + result). Please set the value to [ListProfilesResponse.next\_ + page\_token][google.cloud.talent.v4beta1.ListProfilesResponse. + next\_page\_token] to continue the list. page_size: - Optional. The maximum number of profiles to be returned, at - most 100. Default is 100 unless a positive number smaller - than 100 is specified. + The maximum number of profiles to be returned, at most 100. + Default is 100 unless a positive number smaller than 100 is + specified. read_mask: - Optional. A field mask to specify the profile fields to be - listed in response. All fields are listed if it is unset. - Valid values are: - name + A field mask to specify the profile fields to be listed in + response. All fields are listed if it is unset. Valid values + are: - name """, # @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.ListProfilesRequest) ), @@ -980,8 +1010,8 @@ Attributes: parent: Required. The name of the tenant this profile belongs to. The - format is "projects/{project\_id}/tenants/{tenant\_id}", for - example, "projects/api-test-project/tenants/foo". + format is "projects/{project\_id}/tenants/{tenant\_id}". For + example, "projects/foo/tenants/bar". profile: Required. The profile to be created. """, @@ -1003,8 +1033,7 @@ name: Required. Resource name of the profile to get. The format is "projects/{project\_id}/tenants/{tenant\_id}/profiles/{profile - \_id}", for example, "projects/api-test- - project/tenants/foo/profiles/bar". + \_id}". For example, "projects/foo/tenants/bar/profiles/baz". """, # @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.GetProfileRequest) ), @@ -1024,24 +1053,24 @@ profile: Required. Profile to be updated. update_mask: - Optional. A field mask to specify the profile fields to - update. 
A full update is performed if it is unset. Valid - values are: - external\_id - source - uri - is\_hirable - - create\_time - update\_time - resume - person\_names - - addresses - email\_addresses - phone\_numbers - - personal\_uris - additional\_contact\_info - - employment\_records - education\_records - skills - - activities - publications - patents - certifications - - recruiting\_notes - custom\_attributes - group\_id - - external\_system - source\_note - primary\_responsibilities - - citizenships - work\_authorizations - employee\_types - - language\_code - qualification\_summary - - allowed\_contact\_types - preferred\_contact\_types - - contact\_availability - language\_fluencies - - work\_preference - industry\_experiences - - work\_environment\_experiences - work\_availability - - security\_clearances - references - assessments - - interviews + A field mask to specify the profile fields to update. A full + update is performed if it is unset. Valid values are: - + external\_id - source - source\_types - uri - is\_hirable + - create\_time - update\_time - candidate\_update\_time - + resume\_update\_time - resume - person\_names - addresses - + email\_addresses - phone\_numbers - personal\_uris - + additional\_contact\_info - employment\_records - + education\_records - skills - activities - publications - + patents - certifications - recruiting\_notes - + custom\_attributes - group\_id - external\_system - + source\_note - primary\_responsibilities - citizenships - + work\_authorizations - employee\_types - language\_code - + qualification\_summary - allowed\_contact\_types - + preferred\_contact\_types - contact\_availability - + language\_fluencies - work\_preference - + industry\_experiences - work\_environment\_experiences - + work\_availability - security\_clearances - references - + assessments - interviews """, # @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.UpdateProfileRequest) ), @@ -1061,8 +1090,8 @@ name: Required. 
Resource name of the profile to be deleted. The format is "projects/{project\_id}/tenants/{tenant\_id}/profile - s/{profile\_id}", for example, "projects/api-test- - project/tenants/foo/profiles/bar". + s/{profile\_id}". For example, + "projects/foo/tenants/bar/profiles/baz". """, # @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.DeleteProfileRequest) ), @@ -1081,35 +1110,35 @@ Attributes: parent: Required. The resource name of the tenant to search within. - The format is "projects/{project\_id}/tenants/{tenant\_id}", - for example, "projects/api-test-project/tenants/foo". + The format is "projects/{project\_id}/tenants/{tenant\_id}". + For example, "projects/foo/tenants/bar". request_metadata: Required. The meta information collected about the profile search user. This is used to improve the search quality of the service. These values are provided by users, and must be precise and consistent. profile_query: - Optional. Search query to execute. See + Search query to execute. See [ProfileQuery][google.cloud.talent.v4beta1.ProfileQuery] for more details. page_size: - Optional. A limit on the number of profiles returned in the - search results. A value above the default value 10 can - increase search response time. The maximum value allowed is - 100. Otherwise an error is thrown. + A limit on the number of profiles returned in the search + results. A value above the default value 10 can increase + search response time. The maximum value allowed is 100. + Otherwise an error is thrown. page_token: - Optional. The pageToken, similar to offset enables users of - the API to paginate through the search results. To retrieve - the first page of results, set the pageToken to empty. The - search response includes a [nextPageToken][google.cloud.talent - .v4beta1.SearchProfilesResponse.next\_page\_token] field that - can be used to populate the pageToken field for the next page - of results. 
Using pageToken instead of offset increases the + The pageToken, similar to offset enables users of the API to + paginate through the search results. To retrieve the first + page of results, set the pageToken to empty. The search + response includes a [nextPageToken][google.cloud.talent.v4beta + 1.SearchProfilesResponse.next\_page\_token] field that can be + used to populate the pageToken field for the next page of + results. Using pageToken instead of offset increases the performance of the API, especially compared to larger offset values. offset: - Optional. An integer that specifies the current offset (that - is, starting result) in search results. This field is only + An integer that specifies the current offset (that is, + starting result) in search results. This field is only considered if [page\_token][google.cloud.talent.v4beta1.Search ProfilesRequest.page\_token] is unset. The maximum allowed value is 5000. Otherwise an error is thrown. For example, 0 @@ -1118,12 +1147,12 @@ example pageSize = 10 and offset = 10 means to search from the second page. disable_spell_check: - Optional. This flag controls the spell-check feature. If - ``false``, the service attempts to correct a misspelled query. - For example, "enginee" is corrected to "engineer". + This flag controls the spell-check feature. If ``false``, the + service attempts to correct a misspelled query. For example, + "enginee" is corrected to "engineer". order_by: - Optional. The criteria that determines how search results are - sorted. Defaults is "relevance desc" if no value is specified. + The criteria that determines how search results are sorted. + Defaults is "relevance desc" if no value is specified. Supported options are: - "relevance desc": By descending relevance, as determined by the API algorithms. - "update\_date desc": Sort by [Profile.update\_time][google. @@ -1144,18 +1173,16 @@ .family\_name][google.cloud.talent.v4beta1.PersonName.PersonSt ructuredName.family\_name] in ascending order. 
case_sensitive_sort: - Optional. When sort by field is based on alphabetical order, - sort values case sensitively (based on ASCII) when the value - is set to true. Default value is case in-sensitive sort - (false). + When sort by field is based on alphabetical order, sort values + case sensitively (based on ASCII) when the value is set to + true. Default value is case in-sensitive sort (false). histogram_queries: - Optional. A list of expressions specifies histogram requests - against matching profiles for [SearchProfilesRequest][google.c - loud.talent.v4beta1.SearchProfilesRequest]. The expression - syntax looks like a function definition with optional - parameters. Function syntax: - function\_name(histogram\_facet[, list of buckets]) Data - types: - Histogram facet: facet names with format + A list of expressions specifies histogram requests against + matching profiles for [SearchProfilesRequest][google.cloud.tal + ent.v4beta1.SearchProfilesRequest]. The expression syntax + looks like a function definition with parameters. Function + syntax: function\_name(histogram\_facet[, list of buckets]) + Data types: - Histogram facet: facet names with format [a-zA-Z][a-zA-Z0-9\_]+. - String: string like "any string with backslash escape for quote(")." - Number: whole number and floating point number like 10, -1 and -0.01. - List: list @@ -1217,21 +1244,21 @@ [bucket(MIN, 0, "negative"), bucket(0, MAX, "non- negative")]) result_set_id: - Optional. An id that uniquely identifies the result set of a [ - SearchProfiles][google.cloud.talent.v4beta1.ProfileService.Sea - rchProfiles] call. The id should be retrieved from the [Search - ProfilesResponse][google.cloud.talent.v4beta1.SearchProfilesRe - sponse] message returned from a previous invocation of [Search - Profiles][google.cloud.talent.v4beta1.ProfileService.SearchPro - files]. A result set is an ordered list of search results. 
- If this field is not set, a new result set is computed based - on the [profile\_query][google.cloud.talent.v4beta1.SearchProf - ilesRequest.profile\_query]. A new [result\_set\_id][google.cl - oud.talent.v4beta1.SearchProfilesRequest.result\_set\_id] is - returned as a handle to access this result set. If this field - is set, the service will ignore the resource and [profile\_que - ry][google.cloud.talent.v4beta1.SearchProfilesRequest.profile\ - _query] values, and simply retrieve a page of results from the + An id that uniquely identifies the result set of a [SearchProf + iles][google.cloud.talent.v4beta1.ProfileService.SearchProfile + s] call. The id should be retrieved from the [SearchProfilesRe + sponse][google.cloud.talent.v4beta1.SearchProfilesResponse] + message returned from a previous invocation of [SearchProfiles + ][google.cloud.talent.v4beta1.ProfileService.SearchProfiles]. + A result set is an ordered list of search results. If this + field is not set, a new result set is computed based on the [p + rofile\_query][google.cloud.talent.v4beta1.SearchProfilesReque + st.profile\_query]. A new [result\_set\_id][google.cloud.talen + t.v4beta1.SearchProfilesRequest.result\_set\_id] is returned + as a handle to access this result set. If this field is set, + the service will ignore the resource and [profile\_query][goog + le.cloud.talent.v4beta1.SearchProfilesRequest.profile\_query] + values, and simply retrieve a page of results from the corresponding result set. In this case, one and only one of [p age\_token][google.cloud.talent.v4beta1.SearchProfilesRequest. page\_token] or [offset][google.cloud.talent.v4beta1.SearchPro @@ -1243,17 +1270,17 @@ d.talent.v4beta1.SearchProfilesResponse] to page through the results. strict_keywords_search: - Optional. This flag is used to indicate whether the service - will attempt to understand synonyms and terms related to the - search query or treat the query "as is" when it generates a - set of results. 
By default this flag is set to false, thus - allowing expanded results to also be returned. For example a - search for "software engineer" might also return candidates - who have experience in jobs similar to software engineer - positions. By setting this flag to true, the service will only - attempt to deliver candidates has software engineer in his/her - global fields by treating "software engineer" as a keyword. - It is recommended to provide a feature in the UI (such as a + This flag is used to indicate whether the service will attempt + to understand synonyms and terms related to the search query + or treat the query "as is" when it generates a set of results. + By default this flag is set to false, thus allowing expanded + results to also be returned. For example a search for + "software engineer" might also return candidates who have + experience in jobs similar to software engineer positions. By + setting this flag to true, the service will only attempt to + deliver candidates has software engineer in his/her global + fields by treating "software engineer" as a keyword. It is + recommended to provide a feature in the UI (such as a checkbox) to allow recruiters to set this flag to true if they intend to search for longer boolean strings. """, @@ -1306,9 +1333,7 @@ dict( DESCRIPTOR=_SUMMARIZEDPROFILE, __module__="google.cloud.talent_v4beta1.proto.profile_service_pb2", - __doc__="""Output only. - - Profile entry with metadata inside + __doc__="""Profile entry with metadata inside [SearchProfilesResponse][google.cloud.talent.v4beta1.SearchProfilesResponse]. 
@@ -1338,6 +1363,14 @@ DESCRIPTOR._options = None +_LISTPROFILESREQUEST.fields_by_name["parent"]._options = None +_CREATEPROFILEREQUEST.fields_by_name["parent"]._options = None +_CREATEPROFILEREQUEST.fields_by_name["profile"]._options = None +_GETPROFILEREQUEST.fields_by_name["name"]._options = None +_UPDATEPROFILEREQUEST.fields_by_name["profile"]._options = None +_DELETEPROFILEREQUEST.fields_by_name["name"]._options = None +_SEARCHPROFILESREQUEST.fields_by_name["parent"]._options = None +_SEARCHPROFILESREQUEST.fields_by_name["request_metadata"]._options = None _PROFILESERVICE = _descriptor.ServiceDescriptor( name="ProfileService", @@ -1347,8 +1380,8 @@ serialized_options=_b( "\312A\023jobs.googleapis.com\322AShttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/jobs" ), - serialized_start=1907, - serialized_end=3040, + serialized_start=2025, + serialized_end=3158, methods=[ _descriptor.MethodDescriptor( name="ListProfiles", diff --git a/talent/google/cloud/talent_v4beta1/proto/tenant.proto b/talent/google/cloud/talent_v4beta1/proto/tenant.proto index 8715d201abb9..07b8e0bc488e 100644 --- a/talent/google/cloud/talent_v4beta1/proto/tenant.proto +++ b/talent/google/cloud/talent_v4beta1/proto/tenant.proto @@ -18,6 +18,7 @@ syntax = "proto3"; package google.cloud.talent.v4beta1; import "google/api/annotations.proto"; +import "google/api/field_behavior.proto"; import "google/protobuf/timestamp.proto"; option go_package = "google.golang.org/genproto/googleapis/cloud/talent/v4beta1;talent"; @@ -51,24 +52,24 @@ message Tenant { // tenant is created. // // The format is "projects/{project_id}/tenants/{tenant_id}", for example, - // "projects/api-test-project/tenants/foo". + // "projects/foo/tenants/bar". string name = 1; // Required. Client side tenant identifier, used to uniquely identify the // tenant. // // The maximum number of allowed characters is 255. 
- string external_id = 2; + string external_id = 2 [(google.api.field_behavior) = REQUIRED]; - // Optional. Indicates whether data owned by this tenant may be used to - // provide product improvements across other tenants. + // Indicates whether data owned by this tenant may be used to provide product + // improvements across other tenants. // // Defaults behavior is // [DataUsageType.ISOLATED][google.cloud.talent.v4beta1.Tenant.DataUsageType.ISOLATED] // if it's unset. DataUsageType usage_type = 3; - // Optional. A list of keys of filterable + // A list of keys of filterable // [Profile.custom_attributes][google.cloud.talent.v4beta1.Profile.custom_attributes], // whose corresponding `string_values` are used in keyword searches. Profiles // with `string_values` under these specified field keys are returned if any diff --git a/talent/google/cloud/talent_v4beta1/proto/tenant_pb2.py b/talent/google/cloud/talent_v4beta1/proto/tenant_pb2.py index 338879d85eab..eb81523f5523 100644 --- a/talent/google/cloud/talent_v4beta1/proto/tenant_pb2.py +++ b/talent/google/cloud/talent_v4beta1/proto/tenant_pb2.py @@ -16,6 +16,7 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 @@ -27,10 +28,11 @@ "\n\037com.google.cloud.talent.v4beta1B\023TenantResourceProtoP\001ZAgoogle.golang.org/genproto/googleapis/cloud/talent/v4beta1;talent\242\002\003CTS" ), serialized_pb=_b( - '\n.google/cloud/talent_v4beta1/proto/tenant.proto\x12\x1bgoogle.cloud.talent.v4beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xf8\x01\n\x06Tenant\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x65xternal_id\x18\x02 \x01(\t\x12\x45\n\nusage_type\x18\x03 \x01(\x0e\x32\x31.google.cloud.talent.v4beta1.Tenant.DataUsageType\x12\x34\n,keyword_searchable_profile_custom_attributes\x18\x04 
\x03(\t"N\n\rDataUsageType\x12\x1f\n\x1b\x44\x41TA_USAGE_TYPE_UNSPECIFIED\x10\x00\x12\x0e\n\nAGGREGATED\x10\x01\x12\x0c\n\x08ISOLATED\x10\x02\x42\x81\x01\n\x1f\x63om.google.cloud.talent.v4beta1B\x13TenantResourceProtoP\x01ZAgoogle.golang.org/genproto/googleapis/cloud/talent/v4beta1;talent\xa2\x02\x03\x43TSb\x06proto3' + '\n.google/cloud/talent_v4beta1/proto/tenant.proto\x12\x1bgoogle.cloud.talent.v4beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xfd\x01\n\x06Tenant\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x18\n\x0b\x65xternal_id\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x45\n\nusage_type\x18\x03 \x01(\x0e\x32\x31.google.cloud.talent.v4beta1.Tenant.DataUsageType\x12\x34\n,keyword_searchable_profile_custom_attributes\x18\x04 \x03(\t"N\n\rDataUsageType\x12\x1f\n\x1b\x44\x41TA_USAGE_TYPE_UNSPECIFIED\x10\x00\x12\x0e\n\nAGGREGATED\x10\x01\x12\x0c\n\x08ISOLATED\x10\x02\x42\x81\x01\n\x1f\x63om.google.cloud.talent.v4beta1B\x13TenantResourceProtoP\x01ZAgoogle.golang.org/genproto/googleapis/cloud/talent/v4beta1;talent\xa2\x02\x03\x43TSb\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, ], ) @@ -58,8 +60,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=313, - serialized_end=391, + serialized_start=351, + serialized_end=429, ) _sym_db.RegisterEnumDescriptor(_TENANT_DATAUSAGETYPE) @@ -104,7 +106,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -152,8 +154,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=143, - serialized_end=391, + serialized_start=176, + serialized_end=429, ) _TENANT.fields_by_name["usage_type"].enum_type = _TENANT_DATAUSAGETYPE @@ -179,25 +181,25 @@ tenant. 
This is generated by the service when a tenant is created. The format is "projects/{project\_id}/tenants/{tenant\_id}", for example, - "projects/api-test-project/tenants/foo". + "projects/foo/tenants/bar". external_id: Required. Client side tenant identifier, used to uniquely identify the tenant. The maximum number of allowed characters is 255. usage_type: - Optional. Indicates whether data owned by this tenant may be - used to provide product improvements across other tenants. - Defaults behavior is [DataUsageType.ISOLATED][google.cloud.tal - ent.v4beta1.Tenant.DataUsageType.ISOLATED] if it's unset. + Indicates whether data owned by this tenant may be used to + provide product improvements across other tenants. Defaults + behavior is [DataUsageType.ISOLATED][google.cloud.talent.v4bet + a1.Tenant.DataUsageType.ISOLATED] if it's unset. keyword_searchable_profile_custom_attributes: - Optional. A list of keys of filterable [Profile.custom\_attrib - utes][google.cloud.talent.v4beta1.Profile.custom\_attributes], - whose corresponding ``string_values`` are used in keyword - searches. Profiles with ``string_values`` under these - specified field keys are returned if any of the values match - the search keyword. Custom field values with parenthesis, - brackets and special symbols are not searchable as-is, and - must be surrounded by quotes. + A list of keys of filterable [Profile.custom\_attributes][goog + le.cloud.talent.v4beta1.Profile.custom\_attributes], whose + corresponding ``string_values`` are used in keyword searches. + Profiles with ``string_values`` under these specified field + keys are returned if any of the values match the search + keyword. Custom field values with parenthesis, brackets and + special symbols are not searchable as-is, and must be + surrounded by quotes. 
""", # @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.Tenant) ), @@ -206,4 +208,5 @@ DESCRIPTOR._options = None +_TENANT.fields_by_name["external_id"]._options = None # @@protoc_insertion_point(module_scope) diff --git a/talent/google/cloud/talent_v4beta1/proto/tenant_service.proto b/talent/google/cloud/talent_v4beta1/proto/tenant_service.proto index 33b7f53845e4..94f834726fd0 100644 --- a/talent/google/cloud/talent_v4beta1/proto/tenant_service.proto +++ b/talent/google/cloud/talent_v4beta1/proto/tenant_service.proto @@ -19,6 +19,7 @@ package google.cloud.talent.v4beta1; import "google/api/annotations.proto"; import "google/api/client.proto"; +import "google/api/field_behavior.proto"; import "google/cloud/talent/v4beta1/common.proto"; import "google/cloud/talent/v4beta1/tenant.proto"; import "google/protobuf/empty.proto"; @@ -80,11 +81,11 @@ message CreateTenantRequest { // Required. Resource name of the project under which the tenant is created. // // The format is "projects/{project_id}", for example, - // "projects/api-test-project". - string parent = 1; + // "projects/foo". + string parent = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The tenant to be created. - Tenant tenant = 2; + Tenant tenant = 2 [(google.api.field_behavior) = REQUIRED]; } // Request for getting a tenant by name. @@ -92,18 +93,17 @@ message GetTenantRequest { // Required. The resource name of the tenant to be retrieved. // // The format is "projects/{project_id}/tenants/{tenant_id}", for example, - // "projects/api-test-project/tenants/foo". - string name = 1; + // "projects/foo/tenants/bar". + string name = 1 [(google.api.field_behavior) = REQUIRED]; } // Request for updating a specified tenant. message UpdateTenantRequest { // Required. The tenant resource to replace the current resource in the // system. 
- Tenant tenant = 1; + Tenant tenant = 1 [(google.api.field_behavior) = REQUIRED]; - // Optional but strongly recommended for the best service - // experience. + // Strongly recommended for the best service experience. // // If // [update_mask][google.cloud.talent.v4beta1.UpdateTenantRequest.update_mask] @@ -122,8 +122,8 @@ message DeleteTenantRequest { // Required. The resource name of the tenant to be deleted. // // The format is "projects/{project_id}/tenants/{tenant_id}", for example, - // "projects/api-test-project/tenants/foo". - string name = 1; + // "projects/foo/tenants/bar". + string name = 1 [(google.api.field_behavior) = REQUIRED]; } // List tenants for which the client has ACL visibility. @@ -131,19 +131,17 @@ message ListTenantsRequest { // Required. Resource name of the project under which the tenant is created. // // The format is "projects/{project_id}", for example, - // "projects/api-test-project". - string parent = 1; + // "projects/foo". + string parent = 1 [(google.api.field_behavior) = REQUIRED]; - // Optional. The starting indicator from which to return results. + // The starting indicator from which to return results. string page_token = 2; - // Optional. The maximum number of tenants to be returned, at most 100. + // The maximum number of tenants to be returned, at most 100. // Default is 100 if a non-positive number is provided. int32 page_size = 3; } -// Output only. -// // The List tenants response object. message ListTenantsResponse { // Tenants for the current client. 
diff --git a/talent/google/cloud/talent_v4beta1/proto/tenant_service_pb2.py b/talent/google/cloud/talent_v4beta1/proto/tenant_service_pb2.py index abfe7fa50d0e..62683496a0b3 100644 --- a/talent/google/cloud/talent_v4beta1/proto/tenant_service_pb2.py +++ b/talent/google/cloud/talent_v4beta1/proto/tenant_service_pb2.py @@ -17,6 +17,7 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.cloud.talent_v4beta1.proto import ( common_pb2 as google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_common__pb2, ) @@ -35,11 +36,12 @@ "\n\037com.google.cloud.talent.v4beta1B\022TenantServiceProtoP\001ZAgoogle.golang.org/genproto/googleapis/cloud/talent/v4beta1;talent\242\002\003CTS" ), serialized_pb=_b( - '\n6google/cloud/talent_v4beta1/proto/tenant_service.proto\x12\x1bgoogle.cloud.talent.v4beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a.google/cloud/talent_v4beta1/proto/common.proto\x1a.google/cloud/talent_v4beta1/proto/tenant.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto"Z\n\x13\x43reateTenantRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x33\n\x06tenant\x18\x02 \x01(\x0b\x32#.google.cloud.talent.v4beta1.Tenant" \n\x10GetTenantRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"{\n\x13UpdateTenantRequest\x12\x33\n\x06tenant\x18\x01 \x01(\x0b\x32#.google.cloud.talent.v4beta1.Tenant\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"#\n\x13\x44\x65leteTenantRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"K\n\x12ListTenantsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"\xa5\x01\n\x13ListTenantsResponse\x12\x34\n\x07tenants\x18\x01 \x03(\x0b\x32#.google.cloud.talent.v4beta1.Tenant\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\x12?\n\x08metadata\x18\x03 
\x01(\x0b\x32-.google.cloud.talent.v4beta1.ResponseMetadata2\xf0\x06\n\rTenantService\x12\x96\x01\n\x0c\x43reateTenant\x12\x30.google.cloud.talent.v4beta1.CreateTenantRequest\x1a#.google.cloud.talent.v4beta1.Tenant"/\x82\xd3\xe4\x93\x02)"$/v4beta1/{parent=projects/*}/tenants:\x01*\x12\x8d\x01\n\tGetTenant\x12-.google.cloud.talent.v4beta1.GetTenantRequest\x1a#.google.cloud.talent.v4beta1.Tenant",\x82\xd3\xe4\x93\x02&\x12$/v4beta1/{name=projects/*/tenants/*}\x12\x9d\x01\n\x0cUpdateTenant\x12\x30.google.cloud.talent.v4beta1.UpdateTenantRequest\x1a#.google.cloud.talent.v4beta1.Tenant"6\x82\xd3\xe4\x93\x02\x30\x32+/v4beta1/{tenant.name=projects/*/tenants/*}:\x01*\x12\x86\x01\n\x0c\x44\x65leteTenant\x12\x30.google.cloud.talent.v4beta1.DeleteTenantRequest\x1a\x16.google.protobuf.Empty",\x82\xd3\xe4\x93\x02&*$/v4beta1/{name=projects/*/tenants/*}\x12\x9e\x01\n\x0bListTenants\x12/.google.cloud.talent.v4beta1.ListTenantsRequest\x1a\x30.google.cloud.talent.v4beta1.ListTenantsResponse",\x82\xd3\xe4\x93\x02&\x12$/v4beta1/{parent=projects/*}/tenants\x1al\xca\x41\x13jobs.googleapis.com\xd2\x41Shttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/jobsB\x80\x01\n\x1f\x63om.google.cloud.talent.v4beta1B\x12TenantServiceProtoP\x01ZAgoogle.golang.org/genproto/googleapis/cloud/talent/v4beta1;talent\xa2\x02\x03\x43TSb\x06proto3' + '\n6google/cloud/talent_v4beta1/proto/tenant_service.proto\x12\x1bgoogle.cloud.talent.v4beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a.google/cloud/talent_v4beta1/proto/common.proto\x1a.google/cloud/talent_v4beta1/proto/tenant.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto"d\n\x13\x43reateTenantRequest\x12\x13\n\x06parent\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x38\n\x06tenant\x18\x02 \x01(\x0b\x32#.google.cloud.talent.v4beta1.TenantB\x03\xe0\x41\x02"%\n\x10GetTenantRequest\x12\x11\n\x04name\x18\x01 
\x01(\tB\x03\xe0\x41\x02"\x80\x01\n\x13UpdateTenantRequest\x12\x38\n\x06tenant\x18\x01 \x01(\x0b\x32#.google.cloud.talent.v4beta1.TenantB\x03\xe0\x41\x02\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"(\n\x13\x44\x65leteTenantRequest\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02"P\n\x12ListTenantsRequest\x12\x13\n\x06parent\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"\xa5\x01\n\x13ListTenantsResponse\x12\x34\n\x07tenants\x18\x01 \x03(\x0b\x32#.google.cloud.talent.v4beta1.Tenant\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\x12?\n\x08metadata\x18\x03 \x01(\x0b\x32-.google.cloud.talent.v4beta1.ResponseMetadata2\xf0\x06\n\rTenantService\x12\x96\x01\n\x0c\x43reateTenant\x12\x30.google.cloud.talent.v4beta1.CreateTenantRequest\x1a#.google.cloud.talent.v4beta1.Tenant"/\x82\xd3\xe4\x93\x02)"$/v4beta1/{parent=projects/*}/tenants:\x01*\x12\x8d\x01\n\tGetTenant\x12-.google.cloud.talent.v4beta1.GetTenantRequest\x1a#.google.cloud.talent.v4beta1.Tenant",\x82\xd3\xe4\x93\x02&\x12$/v4beta1/{name=projects/*/tenants/*}\x12\x9d\x01\n\x0cUpdateTenant\x12\x30.google.cloud.talent.v4beta1.UpdateTenantRequest\x1a#.google.cloud.talent.v4beta1.Tenant"6\x82\xd3\xe4\x93\x02\x30\x32+/v4beta1/{tenant.name=projects/*/tenants/*}:\x01*\x12\x86\x01\n\x0c\x44\x65leteTenant\x12\x30.google.cloud.talent.v4beta1.DeleteTenantRequest\x1a\x16.google.protobuf.Empty",\x82\xd3\xe4\x93\x02&*$/v4beta1/{name=projects/*/tenants/*}\x12\x9e\x01\n\x0bListTenants\x12/.google.cloud.talent.v4beta1.ListTenantsRequest\x1a\x30.google.cloud.talent.v4beta1.ListTenantsResponse",\x82\xd3\xe4\x93\x02&\x12$/v4beta1/{parent=projects/*}/tenants\x1al\xca\x41\x13jobs.googleapis.com\xd2\x41Shttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/jobsB\x80\x01\n\x1f\x63om.google.cloud.talent.v4beta1B\x12TenantServiceProtoP\x01ZAgoogle.golang.org/genproto/googleapis/cloud/talent/v4beta1;talent\xa2\x02\x03\x43TSb\x06pr
oto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_common__pb2.DESCRIPTOR, google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_tenant__pb2.DESCRIPTOR, google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, @@ -70,7 +72,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -88,7 +90,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -100,8 +102,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=301, - serialized_end=391, + serialized_start=334, + serialized_end=434, ) @@ -127,7 +129,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ) ], @@ -139,8 +141,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=393, - serialized_end=425, + serialized_start=436, + serialized_end=473, ) @@ -166,7 +168,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -196,8 +198,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=427, - serialized_end=550, + serialized_start=476, + serialized_end=604, ) @@ -223,7 +225,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ) ], @@ -235,8 +237,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=552, - serialized_end=587, + serialized_start=606, + serialized_end=646, ) @@ -262,7 +264,7 @@ containing_type=None, is_extension=False, 
extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -310,8 +312,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=589, - serialized_end=664, + serialized_start=648, + serialized_end=728, ) @@ -385,8 +387,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=667, - serialized_end=832, + serialized_start=731, + serialized_end=896, ) _CREATETENANTREQUEST.fields_by_name[ @@ -427,7 +429,7 @@ parent: Required. Resource name of the project under which the tenant is created. The format is "projects/{project\_id}", for - example, "projects/api-test-project". + example, "projects/foo". tenant: Required. The tenant to be created. """, @@ -449,7 +451,7 @@ name: Required. The resource name of the tenant to be retrieved. The format is "projects/{project\_id}/tenants/{tenant\_id}", - for example, "projects/api-test-project/tenants/foo". + for example, "projects/foo/tenants/bar". """, # @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.GetTenantRequest) ), @@ -470,14 +472,14 @@ Required. The tenant resource to replace the current resource in the system. update_mask: - Optional but strongly recommended for the best service - experience. If [update\_mask][google.cloud.talent.v4beta1.Upd - ateTenantRequest.update\_mask] is provided, only the specified - fields in [tenant][google.cloud.talent.v4beta1.UpdateTenantReq - uest.tenant] are updated. Otherwise all the fields are - updated. A field mask to specify the tenant fields to be - updated. Only top level fields of - [Tenant][google.cloud.talent.v4beta1.Tenant] are supported. + Strongly recommended for the best service experience. If [upd + ate\_mask][google.cloud.talent.v4beta1.UpdateTenantRequest.upd + ate\_mask] is provided, only the specified fields in [tenant][ + google.cloud.talent.v4beta1.UpdateTenantRequest.tenant] are + updated. Otherwise all the fields are updated. 
A field mask + to specify the tenant fields to be updated. Only top level + fields of [Tenant][google.cloud.talent.v4beta1.Tenant] are + supported. """, # @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.UpdateTenantRequest) ), @@ -497,7 +499,7 @@ name: Required. The resource name of the tenant to be deleted. The format is "projects/{project\_id}/tenants/{tenant\_id}", for - example, "projects/api-test-project/tenants/foo". + example, "projects/foo/tenants/bar". """, # @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.DeleteTenantRequest) ), @@ -517,12 +519,12 @@ parent: Required. Resource name of the project under which the tenant is created. The format is "projects/{project\_id}", for - example, "projects/api-test-project". + example, "projects/foo". page_token: - Optional. The starting indicator from which to return results. + The starting indicator from which to return results. page_size: - Optional. The maximum number of tenants to be returned, at - most 100. Default is 100 if a non-positive number is provided. + The maximum number of tenants to be returned, at most 100. + Default is 100 if a non-positive number is provided. """, # @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.ListTenantsRequest) ), @@ -535,9 +537,7 @@ dict( DESCRIPTOR=_LISTTENANTSRESPONSE, __module__="google.cloud.talent_v4beta1.proto.tenant_service_pb2", - __doc__="""Output only. - - The List tenants response object. + __doc__="""The List tenants response object. 
Attributes: @@ -556,6 +556,12 @@ DESCRIPTOR._options = None +_CREATETENANTREQUEST.fields_by_name["parent"]._options = None +_CREATETENANTREQUEST.fields_by_name["tenant"]._options = None +_GETTENANTREQUEST.fields_by_name["name"]._options = None +_UPDATETENANTREQUEST.fields_by_name["tenant"]._options = None +_DELETETENANTREQUEST.fields_by_name["name"]._options = None +_LISTTENANTSREQUEST.fields_by_name["parent"]._options = None _TENANTSERVICE = _descriptor.ServiceDescriptor( name="TenantService", @@ -565,8 +571,8 @@ serialized_options=_b( "\312A\023jobs.googleapis.com\322AShttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/jobs" ), - serialized_start=835, - serialized_end=1715, + serialized_start=899, + serialized_end=1779, methods=[ _descriptor.MethodDescriptor( name="CreateTenant", diff --git a/talent/setup.py b/talent/setup.py index 0476b9315445..1889a4a69be7 100644 --- a/talent/setup.py +++ b/talent/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-talent" description = "Google Cloud Talent Solution API client library" -version = "0.3.0" +version = "0.4.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' diff --git a/talent/synth.metadata b/talent/synth.metadata index 1dcb16adea28..f17be2929a12 100644 --- a/talent/synth.metadata +++ b/talent/synth.metadata @@ -1,26 +1,26 @@ { - "updateTime": "2019-08-28T12:36:29.083499Z", + "updateTime": "2019-10-30T12:33:54.698582Z", "sources": [ { "generator": { "name": "artman", - "version": "0.35.1", - "dockerImage": "googleapis/artman@sha256:b11c7ea0d0831c54016fb50f4b796d24d1971439b30fbc32a369ba1ac887c384" + "version": "0.41.0", + "dockerImage": "googleapis/artman@sha256:75b38a3b073a7b243545f2332463096624c802bb1e56b8cb6f22ba1ecd325fa9" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "dbd38035c35083507e2f0b839985cf17e212cb1c", - "internalRef": "265796259" + "sha": 
"7c4cf35d5fe3b8ad664bd219edd6d9f28a788b64", + "internalRef": "277334937" } }, { "template": { "name": "python_library", "origin": "synthtool.gcp", - "version": "2019.5.2" + "version": "2019.10.17" } } ], diff --git a/tasks/CHANGELOG.md b/tasks/CHANGELOG.md index a910fc21f898..fbb900e07bbb 100644 --- a/tasks/CHANGELOG.md +++ b/tasks/CHANGELOG.md @@ -4,6 +4,25 @@ [1]: https://pypi.org/project/google-cloud-tasks/#history +## 1.3.0 + +11-04-2019 10:06 PST + +### Implementation Changes +- Add proto annotations (via synth) ([#9352](https://github.com/googleapis/google-cloud-python/pull/9352)) + +### New Features +- Add HTTP tasks, OAuth tokens, and OIDC tokens (via synth) ([#9588](https://github.com/googleapis/google-cloud-python/pull/9588)) + +### Documentation +- Tweak docstrings (via synth) ([#9433](https://github.com/googleapis/google-cloud-python/pull/9433)) +- Disambiguate client requests from cloud task requests ([#9398](https://github.com/googleapis/google-cloud-python/pull/9398)) +- Change requests intersphinx url (via synth) ([#9409](https://github.com/googleapis/google-cloud-python/pull/9409)) +- Update documentation (via synth) ([#9069](https://github.com/googleapis/google-cloud-python/pull/9069)) +- Remove compatibility badges from READMEs ([#9035](https://github.com/googleapis/google-cloud-python/pull/9035)) +- Fix intersphinx reference to requests ([#9294](https://github.com/googleapis/google-cloud-python/pull/9294)) +- Remove CI for gh-pages, use googleapis.dev for `api_core` refs. 
([#9085](https://github.com/googleapis/google-cloud-python/pull/9085)) + ## 1.2.1 08-12-2019 13:50 PDT diff --git a/tasks/docs/conf.py b/tasks/docs/conf.py index bac6547e691c..3435ea280162 100644 --- a/tasks/docs/conf.py +++ b/tasks/docs/conf.py @@ -338,7 +338,7 @@ "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://2.python-requests.org/en/master/", None), + "requests": ("https://requests.kennethreitz.org/en/master/", None), "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } diff --git a/tasks/google/cloud/tasks_v2/gapic/cloud_tasks_client.py b/tasks/google/cloud/tasks_v2/gapic/cloud_tasks_client.py index 078af3347965..7c530a809640 100644 --- a/tasks/google/cloud/tasks_v2/gapic/cloud_tasks_client.py +++ b/tasks/google/cloud/tasks_v2/gapic/cloud_tasks_client.py @@ -285,13 +285,13 @@ def list_queues( streaming is performed per-page, this determines the maximum number of resources in a page. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.api_core.page_iterator.PageIterator` instance. @@ -371,13 +371,13 @@ def get_queue( name (str): Required. 
The resource name of the queue. For example: ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2.types.Queue` instance. @@ -464,13 +464,13 @@ def create_queue( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.tasks_v2.types.Queue` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2.types.Queue` instance. 
@@ -563,13 +563,13 @@ def update_queue( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.tasks_v2.types.FieldMask` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2.types.Queue` instance. @@ -646,13 +646,13 @@ def delete_queue( name (str): Required. The queue name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Raises: google.api_core.exceptions.GoogleAPICallError: If the request @@ -718,13 +718,13 @@ def purge_queue( name (str): Required. The queue name. 
For example: ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2.types.Queue` instance. @@ -792,13 +792,13 @@ def pause_queue( name (str): Required. The queue name. For example: ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2.types.Queue` instance. @@ -871,13 +871,13 @@ def resume_queue( name (str): Required. The queue name. For example: ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. 
If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2.types.Queue` instance. @@ -954,13 +954,13 @@ def get_iam_policy( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.tasks_v2.types.GetPolicyOptions` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2.types.Policy` instance. @@ -1047,13 +1047,13 @@ def set_iam_policy( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.tasks_v2.types.Policy` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. 
If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2.types.Policy` instance. @@ -1131,13 +1131,13 @@ def test_iam_permissions( information see `IAM Overview `__. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2.types.TestIamPermissionsResponse` instance. @@ -1240,13 +1240,13 @@ def list_tasks( streaming is performed per-page, this determines the maximum number of resources in a page. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. 
Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.api_core.page_iterator.PageIterator` instance. @@ -1338,13 +1338,13 @@ def get_task( `Google IAM `___ permission on the ``Task`` resource. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2.types.Task` instance. @@ -1463,13 +1463,13 @@ def create_task( `Google IAM `___ permission on the ``Task`` resource. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. 
+ that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2.types.Task` instance. @@ -1539,13 +1539,13 @@ def delete_task( name (str): Required. The task name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Raises: google.api_core.exceptions.GoogleAPICallError: If the request @@ -1638,13 +1638,13 @@ def run_task( `Google IAM `___ permission on the ``Task`` resource. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2.types.Task` instance. 
diff --git a/tasks/google/cloud/tasks_v2/proto/cloudtasks.proto b/tasks/google/cloud/tasks_v2/proto/cloudtasks.proto index f948426a3655..d30aae519a88 100644 --- a/tasks/google/cloud/tasks_v2/proto/cloudtasks.proto +++ b/tasks/google/cloud/tasks_v2/proto/cloudtasks.proto @@ -311,7 +311,7 @@ message ListQueuesRequest { string parent = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { - child_type: "locations.googleapis.com/Location" + child_type: "cloudtasks.googleapis.com/Queue" } ]; @@ -388,7 +388,7 @@ message CreateQueueRequest { string parent = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { - child_type: "locations.googleapis.com/Location" + child_type: "cloudtasks.googleapis.com/Queue" } ]; @@ -470,7 +470,7 @@ message ListTasksRequest { string parent = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { - child_type: "cloudtasks.googleapis.com/Queue" + child_type: "cloudtasks.googleapis.com/Task" } ]; @@ -560,7 +560,7 @@ message CreateTaskRequest { string parent = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { - child_type: "cloudtasks.googleapis.com/Queue" + child_type: "cloudtasks.googleapis.com/Task" } ]; diff --git a/tasks/google/cloud/tasks_v2/proto/cloudtasks_pb2.py b/tasks/google/cloud/tasks_v2/proto/cloudtasks_pb2.py index 55beb9139d88..0ced965d5056 100644 --- a/tasks/google/cloud/tasks_v2/proto/cloudtasks_pb2.py +++ b/tasks/google/cloud/tasks_v2/proto/cloudtasks_pb2.py @@ -39,7 +39,7 @@ "\n\031com.google.cloud.tasks.v2B\017CloudTasksProtoP\001Z:google.golang.org/genproto/googleapis/cloud/tasks/v2;tasks\242\002\005TASKS" ), serialized_pb=_b( - 
'\n,google/cloud/tasks_v2/proto/cloudtasks.proto\x12\x15google.cloud.tasks.v2\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\'google/cloud/tasks_v2/proto/queue.proto\x1a&google/cloud/tasks_v2/proto/task.proto\x1a\x1egoogle/iam/v1/iam_policy.proto\x1a\x1agoogle/iam/v1/policy.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto"\x85\x01\n\x11ListQueuesRequest\x12\x39\n\x06parent\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\x12!locations.googleapis.com/Location\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t"[\n\x12ListQueuesResponse\x12,\n\x06queues\x18\x01 \x03(\x0b\x32\x1c.google.cloud.tasks.v2.Queue\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"H\n\x0fGetQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"\x81\x01\n\x12\x43reateQueueRequest\x12\x39\n\x06parent\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\x12!locations.googleapis.com/Location\x12\x30\n\x05queue\x18\x02 \x01(\x0b\x32\x1c.google.cloud.tasks.v2.QueueB\x03\xe0\x41\x02"w\n\x12UpdateQueueRequest\x12\x30\n\x05queue\x18\x01 \x01(\x0b\x32\x1c.google.cloud.tasks.v2.QueueB\x03\xe0\x41\x02\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"K\n\x12\x44\x65leteQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"J\n\x11PurgeQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"J\n\x11PauseQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"K\n\x12ResumeQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"\xab\x01\n\x10ListTasksRequest\x12\x37\n\x06parent\x18\x01 
\x01(\tB\'\xe0\x41\x02\xfa\x41!\x12\x1f\x63loudtasks.googleapis.com/Queue\x12\x37\n\rresponse_view\x18\x02 \x01(\x0e\x32 .google.cloud.tasks.v2.Task.View\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t"X\n\x11ListTasksResponse\x12*\n\x05tasks\x18\x01 \x03(\x0b\x32\x1b.google.cloud.tasks.v2.Task\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\x7f\n\x0eGetTaskRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x63loudtasks.googleapis.com/Task\x12\x37\n\rresponse_view\x18\x02 \x01(\x0e\x32 .google.cloud.tasks.v2.Task.View"\xb5\x01\n\x11\x43reateTaskRequest\x12\x37\n\x06parent\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\x12\x1f\x63loudtasks.googleapis.com/Queue\x12.\n\x04task\x18\x02 \x01(\x0b\x32\x1b.google.cloud.tasks.v2.TaskB\x03\xe0\x41\x02\x12\x37\n\rresponse_view\x18\x03 \x01(\x0e\x32 .google.cloud.tasks.v2.Task.View"I\n\x11\x44\x65leteTaskRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x63loudtasks.googleapis.com/Task"\x7f\n\x0eRunTaskRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x63loudtasks.googleapis.com/Task\x12\x37\n\rresponse_view\x18\x02 \x01(\x0e\x32 
.google.cloud.tasks.v2.Task.View2\xdd\x14\n\nCloudTasks\x12\x9e\x01\n\nListQueues\x12(.google.cloud.tasks.v2.ListQueuesRequest\x1a).google.cloud.tasks.v2.ListQueuesResponse";\x82\xd3\xe4\x93\x02,\x12*/v2/{parent=projects/*/locations/*}/queues\xda\x41\x06parent\x12\x8b\x01\n\x08GetQueue\x12&.google.cloud.tasks.v2.GetQueueRequest\x1a\x1c.google.cloud.tasks.v2.Queue"9\x82\xd3\xe4\x93\x02,\x12*/v2/{name=projects/*/locations/*/queues/*}\xda\x41\x04name\x12\xa0\x01\n\x0b\x43reateQueue\x12).google.cloud.tasks.v2.CreateQueueRequest\x1a\x1c.google.cloud.tasks.v2.Queue"H\x82\xd3\xe4\x93\x02\x33"*/v2/{parent=projects/*/locations/*}/queues:\x05queue\xda\x41\x0cparent,queue\x12\xab\x01\n\x0bUpdateQueue\x12).google.cloud.tasks.v2.UpdateQueueRequest\x1a\x1c.google.cloud.tasks.v2.Queue"S\x82\xd3\xe4\x93\x02\x39\x32\x30/v2/{queue.name=projects/*/locations/*/queues/*}:\x05queue\xda\x41\x11queue,update_mask\x12\x8b\x01\n\x0b\x44\x65leteQueue\x12).google.cloud.tasks.v2.DeleteQueueRequest\x1a\x16.google.protobuf.Empty"9\x82\xd3\xe4\x93\x02,**/v2/{name=projects/*/locations/*/queues/*}\xda\x41\x04name\x12\x98\x01\n\nPurgeQueue\x12(.google.cloud.tasks.v2.PurgeQueueRequest\x1a\x1c.google.cloud.tasks.v2.Queue"B\x82\xd3\xe4\x93\x02\x35"0/v2/{name=projects/*/locations/*/queues/*}:purge:\x01*\xda\x41\x04name\x12\x98\x01\n\nPauseQueue\x12(.google.cloud.tasks.v2.PauseQueueRequest\x1a\x1c.google.cloud.tasks.v2.Queue"B\x82\xd3\xe4\x93\x02\x35"0/v2/{name=projects/*/locations/*/queues/*}:pause:\x01*\xda\x41\x04name\x12\x9b\x01\n\x0bResumeQueue\x12).google.cloud.tasks.v2.ResumeQueueRequest\x1a\x1c.google.cloud.tasks.v2.Queue"C\x82\xd3\xe4\x93\x02\x36"1/v2/{name=projects/*/locations/*/queues/*}:resume:\x01*\xda\x41\x04name\x12\x9c\x01\n\x0cGetIamPolicy\x12".google.iam.v1.GetIamPolicyRequest\x1a\x15.google.iam.v1.Policy"Q\x82\xd3\xe4\x93\x02@";/v2/{resource=projects/*/locations/*/queues/*}:getIamPolicy:\x01*\xda\x41\x08resource\x12\xa3\x01\n\x0cSetIamPolicy\x12".google.iam.v1.SetIamPolicyRequest\x1a\x15
.google.iam.v1.Policy"X\x82\xd3\xe4\x93\x02@";/v2/{resource=projects/*/locations/*/queues/*}:setIamPolicy:\x01*\xda\x41\x0fresource,policy\x12\xce\x01\n\x12TestIamPermissions\x12(.google.iam.v1.TestIamPermissionsRequest\x1a).google.iam.v1.TestIamPermissionsResponse"c\x82\xd3\xe4\x93\x02\x46"A/v2/{resource=projects/*/locations/*/queues/*}:testIamPermissions:\x01*\xda\x41\x14resource,permissions\x12\xa3\x01\n\tListTasks\x12\'.google.cloud.tasks.v2.ListTasksRequest\x1a(.google.cloud.tasks.v2.ListTasksResponse"C\x82\xd3\xe4\x93\x02\x34\x12\x32/v2/{parent=projects/*/locations/*/queues/*}/tasks\xda\x41\x06parent\x12\x90\x01\n\x07GetTask\x12%.google.cloud.tasks.v2.GetTaskRequest\x1a\x1b.google.cloud.tasks.v2.Task"A\x82\xd3\xe4\x93\x02\x34\x12\x32/v2/{name=projects/*/locations/*/queues/*/tasks/*}\xda\x41\x04name\x12\xa0\x01\n\nCreateTask\x12(.google.cloud.tasks.v2.CreateTaskRequest\x1a\x1b.google.cloud.tasks.v2.Task"K\x82\xd3\xe4\x93\x02\x37"2/v2/{parent=projects/*/locations/*/queues/*}/tasks:\x01*\xda\x41\x0bparent,task\x12\x91\x01\n\nDeleteTask\x12(.google.cloud.tasks.v2.DeleteTaskRequest\x1a\x16.google.protobuf.Empty"A\x82\xd3\xe4\x93\x02\x34*2/v2/{name=projects/*/locations/*/queues/*/tasks/*}\xda\x41\x04name\x12\x97\x01\n\x07RunTask\x12%.google.cloud.tasks.v2.RunTaskRequest\x1a\x1b.google.cloud.tasks.v2.Task"H\x82\xd3\xe4\x93\x02;"6/v2/{name=projects/*/locations/*/queues/*/tasks/*}:run:\x01*\xda\x41\x04name\x1aM\xca\x41\x19\x63loudtasks.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformBr\n\x19\x63om.google.cloud.tasks.v2B\x0f\x43loudTasksProtoP\x01Z:google.golang.org/genproto/googleapis/cloud/tasks/v2;tasks\xa2\x02\x05TASKSb\x06proto3' + 
'\n,google/cloud/tasks_v2/proto/cloudtasks.proto\x12\x15google.cloud.tasks.v2\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\'google/cloud/tasks_v2/proto/queue.proto\x1a&google/cloud/tasks_v2/proto/task.proto\x1a\x1egoogle/iam/v1/iam_policy.proto\x1a\x1agoogle/iam/v1/policy.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto"\x83\x01\n\x11ListQueuesRequest\x12\x37\n\x06parent\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\x12\x1f\x63loudtasks.googleapis.com/Queue\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t"[\n\x12ListQueuesResponse\x12,\n\x06queues\x18\x01 \x03(\x0b\x32\x1c.google.cloud.tasks.v2.Queue\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"H\n\x0fGetQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"\x7f\n\x12\x43reateQueueRequest\x12\x37\n\x06parent\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\x12\x1f\x63loudtasks.googleapis.com/Queue\x12\x30\n\x05queue\x18\x02 \x01(\x0b\x32\x1c.google.cloud.tasks.v2.QueueB\x03\xe0\x41\x02"w\n\x12UpdateQueueRequest\x12\x30\n\x05queue\x18\x01 \x01(\x0b\x32\x1c.google.cloud.tasks.v2.QueueB\x03\xe0\x41\x02\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"K\n\x12\x44\x65leteQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"J\n\x11PurgeQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"J\n\x11PauseQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"K\n\x12ResumeQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"\xaa\x01\n\x10ListTasksRequest\x12\x36\n\x06parent\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 
\x12\x1e\x63loudtasks.googleapis.com/Task\x12\x37\n\rresponse_view\x18\x02 \x01(\x0e\x32 .google.cloud.tasks.v2.Task.View\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t"X\n\x11ListTasksResponse\x12*\n\x05tasks\x18\x01 \x03(\x0b\x32\x1b.google.cloud.tasks.v2.Task\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\x7f\n\x0eGetTaskRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x63loudtasks.googleapis.com/Task\x12\x37\n\rresponse_view\x18\x02 \x01(\x0e\x32 .google.cloud.tasks.v2.Task.View"\xb4\x01\n\x11\x43reateTaskRequest\x12\x36\n\x06parent\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \x12\x1e\x63loudtasks.googleapis.com/Task\x12.\n\x04task\x18\x02 \x01(\x0b\x32\x1b.google.cloud.tasks.v2.TaskB\x03\xe0\x41\x02\x12\x37\n\rresponse_view\x18\x03 \x01(\x0e\x32 .google.cloud.tasks.v2.Task.View"I\n\x11\x44\x65leteTaskRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x63loudtasks.googleapis.com/Task"\x7f\n\x0eRunTaskRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x63loudtasks.googleapis.com/Task\x12\x37\n\rresponse_view\x18\x02 \x01(\x0e\x32 
.google.cloud.tasks.v2.Task.View2\xdd\x14\n\nCloudTasks\x12\x9e\x01\n\nListQueues\x12(.google.cloud.tasks.v2.ListQueuesRequest\x1a).google.cloud.tasks.v2.ListQueuesResponse";\x82\xd3\xe4\x93\x02,\x12*/v2/{parent=projects/*/locations/*}/queues\xda\x41\x06parent\x12\x8b\x01\n\x08GetQueue\x12&.google.cloud.tasks.v2.GetQueueRequest\x1a\x1c.google.cloud.tasks.v2.Queue"9\x82\xd3\xe4\x93\x02,\x12*/v2/{name=projects/*/locations/*/queues/*}\xda\x41\x04name\x12\xa0\x01\n\x0b\x43reateQueue\x12).google.cloud.tasks.v2.CreateQueueRequest\x1a\x1c.google.cloud.tasks.v2.Queue"H\x82\xd3\xe4\x93\x02\x33"*/v2/{parent=projects/*/locations/*}/queues:\x05queue\xda\x41\x0cparent,queue\x12\xab\x01\n\x0bUpdateQueue\x12).google.cloud.tasks.v2.UpdateQueueRequest\x1a\x1c.google.cloud.tasks.v2.Queue"S\x82\xd3\xe4\x93\x02\x39\x32\x30/v2/{queue.name=projects/*/locations/*/queues/*}:\x05queue\xda\x41\x11queue,update_mask\x12\x8b\x01\n\x0b\x44\x65leteQueue\x12).google.cloud.tasks.v2.DeleteQueueRequest\x1a\x16.google.protobuf.Empty"9\x82\xd3\xe4\x93\x02,**/v2/{name=projects/*/locations/*/queues/*}\xda\x41\x04name\x12\x98\x01\n\nPurgeQueue\x12(.google.cloud.tasks.v2.PurgeQueueRequest\x1a\x1c.google.cloud.tasks.v2.Queue"B\x82\xd3\xe4\x93\x02\x35"0/v2/{name=projects/*/locations/*/queues/*}:purge:\x01*\xda\x41\x04name\x12\x98\x01\n\nPauseQueue\x12(.google.cloud.tasks.v2.PauseQueueRequest\x1a\x1c.google.cloud.tasks.v2.Queue"B\x82\xd3\xe4\x93\x02\x35"0/v2/{name=projects/*/locations/*/queues/*}:pause:\x01*\xda\x41\x04name\x12\x9b\x01\n\x0bResumeQueue\x12).google.cloud.tasks.v2.ResumeQueueRequest\x1a\x1c.google.cloud.tasks.v2.Queue"C\x82\xd3\xe4\x93\x02\x36"1/v2/{name=projects/*/locations/*/queues/*}:resume:\x01*\xda\x41\x04name\x12\x9c\x01\n\x0cGetIamPolicy\x12".google.iam.v1.GetIamPolicyRequest\x1a\x15.google.iam.v1.Policy"Q\x82\xd3\xe4\x93\x02@";/v2/{resource=projects/*/locations/*/queues/*}:getIamPolicy:\x01*\xda\x41\x08resource\x12\xa3\x01\n\x0cSetIamPolicy\x12".google.iam.v1.SetIamPolicyRequest\x1a\x15
.google.iam.v1.Policy"X\x82\xd3\xe4\x93\x02@";/v2/{resource=projects/*/locations/*/queues/*}:setIamPolicy:\x01*\xda\x41\x0fresource,policy\x12\xce\x01\n\x12TestIamPermissions\x12(.google.iam.v1.TestIamPermissionsRequest\x1a).google.iam.v1.TestIamPermissionsResponse"c\x82\xd3\xe4\x93\x02\x46"A/v2/{resource=projects/*/locations/*/queues/*}:testIamPermissions:\x01*\xda\x41\x14resource,permissions\x12\xa3\x01\n\tListTasks\x12\'.google.cloud.tasks.v2.ListTasksRequest\x1a(.google.cloud.tasks.v2.ListTasksResponse"C\x82\xd3\xe4\x93\x02\x34\x12\x32/v2/{parent=projects/*/locations/*/queues/*}/tasks\xda\x41\x06parent\x12\x90\x01\n\x07GetTask\x12%.google.cloud.tasks.v2.GetTaskRequest\x1a\x1b.google.cloud.tasks.v2.Task"A\x82\xd3\xe4\x93\x02\x34\x12\x32/v2/{name=projects/*/locations/*/queues/*/tasks/*}\xda\x41\x04name\x12\xa0\x01\n\nCreateTask\x12(.google.cloud.tasks.v2.CreateTaskRequest\x1a\x1b.google.cloud.tasks.v2.Task"K\x82\xd3\xe4\x93\x02\x37"2/v2/{parent=projects/*/locations/*/queues/*}/tasks:\x01*\xda\x41\x0bparent,task\x12\x91\x01\n\nDeleteTask\x12(.google.cloud.tasks.v2.DeleteTaskRequest\x1a\x16.google.protobuf.Empty"A\x82\xd3\xe4\x93\x02\x34*2/v2/{name=projects/*/locations/*/queues/*/tasks/*}\xda\x41\x04name\x12\x97\x01\n\x07RunTask\x12%.google.cloud.tasks.v2.RunTaskRequest\x1a\x1b.google.cloud.tasks.v2.Task"H\x82\xd3\xe4\x93\x02;"6/v2/{name=projects/*/locations/*/queues/*/tasks/*}:run:\x01*\xda\x41\x04name\x1aM\xca\x41\x19\x63loudtasks.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformBr\n\x19\x63om.google.cloud.tasks.v2B\x0f\x43loudTasksProtoP\x01Z:google.golang.org/genproto/googleapis/cloud/tasks/v2;tasks\xa2\x02\x05TASKSb\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, @@ -79,7 +79,7 @@ is_extension=False, extension_scope=None, serialized_options=_b( - "\340A\002\372A#\022!locations.googleapis.com/Location" + "\340A\002\372A!\022\037cloudtasks.googleapis.com/Queue" ), file=DESCRIPTOR, ), @@ -147,7 +147,7 @@ 
extension_ranges=[], oneofs=[], serialized_start=391, - serialized_end=524, + serialized_end=522, ) @@ -203,8 +203,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=526, - serialized_end=617, + serialized_start=524, + serialized_end=615, ) @@ -244,8 +244,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=619, - serialized_end=691, + serialized_start=617, + serialized_end=689, ) @@ -272,7 +272,7 @@ is_extension=False, extension_scope=None, serialized_options=_b( - "\340A\002\372A#\022!locations.googleapis.com/Location" + "\340A\002\372A!\022\037cloudtasks.googleapis.com/Queue" ), file=DESCRIPTOR, ), @@ -303,8 +303,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=694, - serialized_end=823, + serialized_start=691, + serialized_end=818, ) @@ -360,8 +360,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=825, - serialized_end=944, + serialized_start=820, + serialized_end=939, ) @@ -401,8 +401,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=946, - serialized_end=1021, + serialized_start=941, + serialized_end=1016, ) @@ -442,8 +442,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1023, - serialized_end=1097, + serialized_start=1018, + serialized_end=1092, ) @@ -483,8 +483,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1099, - serialized_end=1173, + serialized_start=1094, + serialized_end=1168, ) @@ -524,8 +524,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1175, - serialized_end=1250, + serialized_start=1170, + serialized_end=1245, ) @@ -552,7 +552,7 @@ is_extension=False, extension_scope=None, serialized_options=_b( - "\340A\002\372A!\022\037cloudtasks.googleapis.com/Queue" + "\340A\002\372A \022\036cloudtasks.googleapis.com/Task" ), file=DESCRIPTOR, ), @@ -619,8 +619,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1253, - serialized_end=1424, + 
serialized_start=1248, + serialized_end=1418, ) @@ -676,8 +676,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1426, - serialized_end=1514, + serialized_start=1420, + serialized_end=1508, ) @@ -735,8 +735,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1516, - serialized_end=1643, + serialized_start=1510, + serialized_end=1637, ) @@ -763,7 +763,7 @@ is_extension=False, extension_scope=None, serialized_options=_b( - "\340A\002\372A!\022\037cloudtasks.googleapis.com/Queue" + "\340A\002\372A \022\036cloudtasks.googleapis.com/Task" ), file=DESCRIPTOR, ), @@ -812,8 +812,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1646, - serialized_end=1827, + serialized_start=1640, + serialized_end=1820, ) @@ -853,8 +853,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1829, - serialized_end=1902, + serialized_start=1822, + serialized_end=1895, ) @@ -912,8 +912,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1904, - serialized_end=2031, + serialized_start=1897, + serialized_end=2024, ) _LISTQUEUESRESPONSE.fields_by_name[ @@ -1441,8 +1441,8 @@ serialized_options=_b( "\312A\031cloudtasks.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" ), - serialized_start=2034, - serialized_end=4687, + serialized_start=2027, + serialized_end=4680, methods=[ _descriptor.MethodDescriptor( name="ListQueues", diff --git a/tasks/google/cloud/tasks_v2/proto/queue.proto b/tasks/google/cloud/tasks_v2/proto/queue.proto index ce6a90244d7b..b50f5aec8b91 100644 --- a/tasks/google/cloud/tasks_v2/proto/queue.proto +++ b/tasks/google/cloud/tasks_v2/proto/queue.proto @@ -99,6 +99,7 @@ message Queue { // [task-level app_engine_routing][google.cloud.tasks.v2.AppEngineHttpRequest.app_engine_routing]. // These settings apply only to // [App Engine tasks][google.cloud.tasks.v2.AppEngineHttpRequest] in this queue. 
+ // [Http tasks][google.cloud.tasks.v2.HttpRequest] are not affected. // // If set, `app_engine_routing_override` is used for all // [App Engine tasks][google.cloud.tasks.v2.AppEngineHttpRequest] in the queue, no matter what the diff --git a/tasks/google/cloud/tasks_v2/proto/queue_pb2.py b/tasks/google/cloud/tasks_v2/proto/queue_pb2.py index e7ae7d7e2b16..282827c52b09 100644 --- a/tasks/google/cloud/tasks_v2/proto/queue_pb2.py +++ b/tasks/google/cloud/tasks_v2/proto/queue_pb2.py @@ -452,7 +452,8 @@ asks.v2.AppEngineHttpRequest.app\_engine\_routing]. These settings apply only to [App Engine tasks][google.cloud.tasks.v2.AppEngineHttpRequest] in this - queue. If set, ``app_engine_routing_override`` is used for + queue. [Http tasks][google.cloud.tasks.v2.HttpRequest] are not + affected. If set, ``app_engine_routing_override`` is used for all [App Engine tasks][google.cloud.tasks.v2.AppEngineHttpRequest] in the queue, no matter what the setting is for the [task-level app\_ diff --git a/tasks/google/cloud/tasks_v2/proto/target.proto b/tasks/google/cloud/tasks_v2/proto/target.proto index acd38fedee3a..04a88ce9ded0 100644 --- a/tasks/google/cloud/tasks_v2/proto/target.proto +++ b/tasks/google/cloud/tasks_v2/proto/target.proto @@ -17,6 +17,7 @@ syntax = "proto3"; package google.cloud.tasks.v2; +import "google/api/field_behavior.proto"; import "google/api/annotations.proto"; option go_package = "google.golang.org/genproto/googleapis/cloud/tasks/v2;tasks"; @@ -24,6 +25,112 @@ option java_multiple_files = true; option java_outer_classname = "TargetProto"; option java_package = "com.google.cloud.tasks.v2"; +// HTTP request. +// +// The task will be pushed to the worker as an HTTP request. If the worker +// or the redirected worker acknowledges the task by returning a successful HTTP +// response code ([`200` - `299`]), the task will removed from the queue. 
If +// any other HTTP response code is returned or no response is received, the +// task will be retried according to the following: +// +// * User-specified throttling: [retry configuration][google.cloud.tasks.v2.Queue.retry_config], +// [rate limits][google.cloud.tasks.v2.Queue.rate_limits], and the [queue's state][google.cloud.tasks.v2.Queue.state]. +// +// * System throttling: To prevent the worker from overloading, Cloud Tasks may +// temporarily reduce the queue's effective rate. User-specified settings +// will not be changed. +// +// System throttling happens because: +// +// * Cloud Tasks backs off on all errors. Normally the backoff specified in +// [rate limits][google.cloud.tasks.v2.Queue.rate_limits] will be used. But if the worker returns +// `429` (Too Many Requests), `503` (Service Unavailable), or the rate of +// errors is high, Cloud Tasks will use a higher backoff rate. The retry +// specified in the `Retry-After` HTTP response header is considered. +// +// * To prevent traffic spikes and to smooth sudden large traffic spikes, +// dispatches ramp up slowly when the queue is newly created or idle and +// if large numbers of tasks suddenly become available to dispatch (due to +// spikes in create task rates, the queue being unpaused, or many tasks +// that are scheduled at the same time). +message HttpRequest { + // Required. The full url path that the request will be sent to. + // + // This string must begin with either "http://" or "https://". Some examples + // are: `http://acme.com` and `https://acme.com/sales:8080`. Cloud Tasks will + // encode some characters for safety and compatibility. The maximum allowed + // URL length is 2083 characters after encoding. + // + // The `Location` header response from a redirect response [`300` - `399`] + // may be followed. The redirect is not counted as a separate attempt. + string url = 1 [(google.api.field_behavior) = REQUIRED]; + + // The HTTP method to use for the request. The default is POST. 
+ HttpMethod http_method = 2; + + // HTTP request headers. + // + // This map contains the header field names and values. + // Headers can be set when the + // [task is created][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. + // + // These headers represent a subset of the headers that will accompany the + // task's HTTP request. Some HTTP request headers will be ignored or replaced. + // + // A partial list of headers that will be ignored or replaced is: + // + // * Host: This will be computed by Cloud Tasks and derived from + // [HttpRequest.url][google.cloud.tasks.v2.HttpRequest.url]. + // * Content-Length: This will be computed by Cloud Tasks. + // * User-Agent: This will be set to `"Google-Cloud-Tasks"`. + // * X-Google-*: Google use only. + // * X-AppEngine-*: Google use only. + // + // `Content-Type` won't be set by Cloud Tasks. You can explicitly set + // `Content-Type` to a media type when the + // [task is created][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. + // For example, `Content-Type` can be set to `"application/octet-stream"` or + // `"application/json"`. + // + // Headers which can have multiple values (according to RFC2616) can be + // specified using comma-separated values. + // + // The size of the headers must be less than 80KB. + map headers = 3; + + // HTTP request body. + // + // A request body is allowed only if the + // [HTTP method][google.cloud.tasks.v2.HttpRequest.http_method] is POST, PUT, or PATCH. It is an + // error to set body on a task with an incompatible [HttpMethod][google.cloud.tasks.v2.HttpMethod]. + bytes body = 4; + + // The mode for generating an `Authorization` header for HTTP requests. + // + // If specified, all `Authorization` headers in the [HttpRequest.headers][google.cloud.tasks.v2.HttpRequest.headers] + // field will be overridden. 
+ oneof authorization_header { + // If specified, an + // [OAuth token](https://developers.google.com/identity/protocols/OAuth2) + // will be generated and attached as an `Authorization` header in the HTTP + // request. + // + // This type of authorization should generally only be used when calling + // Google APIs hosted on *.googleapis.com. + OAuthToken oauth_token = 5; + + // If specified, an + // [OIDC](https://developers.google.com/identity/protocols/OpenIDConnect) + // token will be generated and attached as an `Authorization` header in the + // HTTP request. + // + // This type of authorization can be used for many scenarios, including + // calling Cloud Run, or endpoints where you intend to validate the token + // yourself. + OidcToken oidc_token = 6; + } +} + // App Engine HTTP request. // // The message defines the HTTP request that is sent to an App Engine app when @@ -278,3 +385,40 @@ enum HttpMethod { // HTTP OPTIONS OPTIONS = 7; } + +// Contains information needed for generating an +// [OAuth token](https://developers.google.com/identity/protocols/OAuth2). +// This type of authorization should generally only be used when calling Google +// APIs hosted on *.googleapis.com. +message OAuthToken { + // [Service account email](https://cloud.google.com/iam/docs/service-accounts) + // to be used for generating OAuth token. + // The service account must be within the same project as the queue. The + // caller must have iam.serviceAccounts.actAs permission for the service + // account. + string service_account_email = 1; + + // OAuth scope to be used for generating OAuth access token. + // If not specified, "https://www.googleapis.com/auth/cloud-platform" + // will be used. + string scope = 2; +} + +// Contains information needed for generating an +// [OpenID Connect +// token](https://developers.google.com/identity/protocols/OpenIDConnect). 
+// This type of authorization can be used for many scenarios, including +// calling Cloud Run, or endpoints where you intend to validate the token +// yourself. +message OidcToken { + // [Service account email](https://cloud.google.com/iam/docs/service-accounts) + // to be used for generating OIDC token. + // The service account must be within the same project as the queue. The + // caller must have iam.serviceAccounts.actAs permission for the service + // account. + string service_account_email = 1; + + // Audience to be used when generating OIDC token. If not specified, the URI + // specified in target will be used. + string audience = 2; +} diff --git a/tasks/google/cloud/tasks_v2/proto/target_pb2.py b/tasks/google/cloud/tasks_v2/proto/target_pb2.py index 0203f5333672..2f894cc62d2f 100644 --- a/tasks/google/cloud/tasks_v2/proto/target_pb2.py +++ b/tasks/google/cloud/tasks_v2/proto/target_pb2.py @@ -16,6 +16,7 @@ _sym_db = _symbol_database.Default() +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 @@ -27,9 +28,12 @@ "\n\031com.google.cloud.tasks.v2B\013TargetProtoP\001Z:google.golang.org/genproto/googleapis/cloud/tasks/v2;tasks" ), serialized_pb=_b( - '\n(google/cloud/tasks_v2/proto/target.proto\x12\x15google.cloud.tasks.v2\x1a\x1cgoogle/api/annotations.proto"\xb2\x02\n\x14\x41ppEngineHttpRequest\x12\x36\n\x0bhttp_method\x18\x01 \x01(\x0e\x32!.google.cloud.tasks.v2.HttpMethod\x12\x43\n\x12\x61pp_engine_routing\x18\x02 \x01(\x0b\x32\'.google.cloud.tasks.v2.AppEngineRouting\x12\x14\n\x0crelative_uri\x18\x03 \x01(\t\x12I\n\x07headers\x18\x04 \x03(\x0b\x32\x38.google.cloud.tasks.v2.AppEngineHttpRequest.HeadersEntry\x12\x0c\n\x04\x62ody\x18\x05 \x01(\x0c\x1a.\n\x0cHeadersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"T\n\x10\x41ppEngineRouting\x12\x0f\n\x07service\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 
\x01(\t\x12\x10\n\x08instance\x18\x03 \x01(\t\x12\x0c\n\x04host\x18\x04 \x01(\t*s\n\nHttpMethod\x12\x1b\n\x17HTTP_METHOD_UNSPECIFIED\x10\x00\x12\x08\n\x04POST\x10\x01\x12\x07\n\x03GET\x10\x02\x12\x08\n\x04HEAD\x10\x03\x12\x07\n\x03PUT\x10\x04\x12\n\n\x06\x44\x45LETE\x10\x05\x12\t\n\x05PATCH\x10\x06\x12\x0b\n\x07OPTIONS\x10\x07\x42\x66\n\x19\x63om.google.cloud.tasks.v2B\x0bTargetProtoP\x01Z:google.golang.org/genproto/googleapis/cloud/tasks/v2;tasksb\x06proto3' + '\n(google/cloud/tasks_v2/proto/target.proto\x12\x15google.cloud.tasks.v2\x1a\x1fgoogle/api/field_behavior.proto\x1a\x1cgoogle/api/annotations.proto"\xe1\x02\n\x0bHttpRequest\x12\x10\n\x03url\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x36\n\x0bhttp_method\x18\x02 \x01(\x0e\x32!.google.cloud.tasks.v2.HttpMethod\x12@\n\x07headers\x18\x03 \x03(\x0b\x32/.google.cloud.tasks.v2.HttpRequest.HeadersEntry\x12\x0c\n\x04\x62ody\x18\x04 \x01(\x0c\x12\x38\n\x0boauth_token\x18\x05 \x01(\x0b\x32!.google.cloud.tasks.v2.OAuthTokenH\x00\x12\x36\n\noidc_token\x18\x06 \x01(\x0b\x32 .google.cloud.tasks.v2.OidcTokenH\x00\x1a.\n\x0cHeadersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x16\n\x14\x61uthorization_header"\xb2\x02\n\x14\x41ppEngineHttpRequest\x12\x36\n\x0bhttp_method\x18\x01 \x01(\x0e\x32!.google.cloud.tasks.v2.HttpMethod\x12\x43\n\x12\x61pp_engine_routing\x18\x02 \x01(\x0b\x32\'.google.cloud.tasks.v2.AppEngineRouting\x12\x14\n\x0crelative_uri\x18\x03 \x01(\t\x12I\n\x07headers\x18\x04 \x03(\x0b\x32\x38.google.cloud.tasks.v2.AppEngineHttpRequest.HeadersEntry\x12\x0c\n\x04\x62ody\x18\x05 \x01(\x0c\x1a.\n\x0cHeadersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"T\n\x10\x41ppEngineRouting\x12\x0f\n\x07service\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\x12\x10\n\x08instance\x18\x03 \x01(\t\x12\x0c\n\x04host\x18\x04 \x01(\t":\n\nOAuthToken\x12\x1d\n\x15service_account_email\x18\x01 \x01(\t\x12\r\n\x05scope\x18\x02 
\x01(\t"<\n\tOidcToken\x12\x1d\n\x15service_account_email\x18\x01 \x01(\t\x12\x10\n\x08\x61udience\x18\x02 \x01(\t*s\n\nHttpMethod\x12\x1b\n\x17HTTP_METHOD_UNSPECIFIED\x10\x00\x12\x08\n\x04POST\x10\x01\x12\x07\n\x03GET\x10\x02\x12\x08\n\x04HEAD\x10\x03\x12\x07\n\x03PUT\x10\x04\x12\n\n\x06\x44\x45LETE\x10\x05\x12\t\n\x05PATCH\x10\x06\x12\x0b\n\x07OPTIONS\x10\x07\x42\x66\n\x19\x63om.google.cloud.tasks.v2B\x0bTargetProtoP\x01Z:google.golang.org/genproto/googleapis/cloud/tasks/v2;tasksb\x06proto3' ), - dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR], + dependencies=[ + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, + google_dot_api_dot_annotations__pb2.DESCRIPTOR, + ], ) _HTTPMETHOD = _descriptor.EnumDescriptor( @@ -69,8 +73,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=492, - serialized_end=607, + serialized_start=1003, + serialized_end=1118, ) _sym_db.RegisterEnumDescriptor(_HTTPMETHOD) @@ -85,6 +89,199 @@ OPTIONS = 7 +_HTTPREQUEST_HEADERSENTRY = _descriptor.Descriptor( + name="HeadersEntry", + full_name="google.cloud.tasks.v2.HttpRequest.HeadersEntry", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="google.cloud.tasks.v2.HttpRequest.HeadersEntry.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.cloud.tasks.v2.HttpRequest.HeadersEntry.value", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + 
extensions=[], + nested_types=[], + enum_types=[], + serialized_options=_b("8\001"), + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=414, + serialized_end=460, +) + +_HTTPREQUEST = _descriptor.Descriptor( + name="HttpRequest", + full_name="google.cloud.tasks.v2.HttpRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="url", + full_name="google.cloud.tasks.v2.HttpRequest.url", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="http_method", + full_name="google.cloud.tasks.v2.HttpRequest.http_method", + index=1, + number=2, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="headers", + full_name="google.cloud.tasks.v2.HttpRequest.headers", + index=2, + number=3, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="body", + full_name="google.cloud.tasks.v2.HttpRequest.body", + index=3, + number=4, + type=12, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b(""), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="oauth_token", + 
full_name="google.cloud.tasks.v2.HttpRequest.oauth_token", + index=4, + number=5, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="oidc_token", + full_name="google.cloud.tasks.v2.HttpRequest.oidc_token", + index=5, + number=6, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[_HTTPREQUEST_HEADERSENTRY], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="authorization_header", + full_name="google.cloud.tasks.v2.HttpRequest.authorization_header", + index=0, + containing_type=None, + fields=[], + ) + ], + serialized_start=131, + serialized_end=484, +) + + _APPENGINEHTTPREQUEST_HEADERSENTRY = _descriptor.Descriptor( name="HeadersEntry", full_name="google.cloud.tasks.v2.AppEngineHttpRequest.HeadersEntry", @@ -137,8 +334,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=358, - serialized_end=404, + serialized_start=414, + serialized_end=460, ) _APPENGINEHTTPREQUEST = _descriptor.Descriptor( @@ -247,8 +444,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=98, - serialized_end=404, + serialized_start=487, + serialized_end=793, ) @@ -340,10 +537,141 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=406, - serialized_end=490, + serialized_start=795, + serialized_end=879, +) + + +_OAUTHTOKEN = _descriptor.Descriptor( + name="OAuthToken", + full_name="google.cloud.tasks.v2.OAuthToken", + filename=None, + file=DESCRIPTOR, + containing_type=None, 
+ fields=[ + _descriptor.FieldDescriptor( + name="service_account_email", + full_name="google.cloud.tasks.v2.OAuthToken.service_account_email", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="scope", + full_name="google.cloud.tasks.v2.OAuthToken.scope", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=881, + serialized_end=939, +) + + +_OIDCTOKEN = _descriptor.Descriptor( + name="OidcToken", + full_name="google.cloud.tasks.v2.OidcToken", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="service_account_email", + full_name="google.cloud.tasks.v2.OidcToken.service_account_email", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="audience", + full_name="google.cloud.tasks.v2.OidcToken.audience", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + 
file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=941, + serialized_end=1001, ) +_HTTPREQUEST_HEADERSENTRY.containing_type = _HTTPREQUEST +_HTTPREQUEST.fields_by_name["http_method"].enum_type = _HTTPMETHOD +_HTTPREQUEST.fields_by_name["headers"].message_type = _HTTPREQUEST_HEADERSENTRY +_HTTPREQUEST.fields_by_name["oauth_token"].message_type = _OAUTHTOKEN +_HTTPREQUEST.fields_by_name["oidc_token"].message_type = _OIDCTOKEN +_HTTPREQUEST.oneofs_by_name["authorization_header"].fields.append( + _HTTPREQUEST.fields_by_name["oauth_token"] +) +_HTTPREQUEST.fields_by_name[ + "oauth_token" +].containing_oneof = _HTTPREQUEST.oneofs_by_name["authorization_header"] +_HTTPREQUEST.oneofs_by_name["authorization_header"].fields.append( + _HTTPREQUEST.fields_by_name["oidc_token"] +) +_HTTPREQUEST.fields_by_name[ + "oidc_token" +].containing_oneof = _HTTPREQUEST.oneofs_by_name["authorization_header"] _APPENGINEHTTPREQUEST_HEADERSENTRY.containing_type = _APPENGINEHTTPREQUEST _APPENGINEHTTPREQUEST.fields_by_name["http_method"].enum_type = _HTTPMETHOD _APPENGINEHTTPREQUEST.fields_by_name[ @@ -352,11 +680,128 @@ _APPENGINEHTTPREQUEST.fields_by_name[ "headers" ].message_type = _APPENGINEHTTPREQUEST_HEADERSENTRY +DESCRIPTOR.message_types_by_name["HttpRequest"] = _HTTPREQUEST DESCRIPTOR.message_types_by_name["AppEngineHttpRequest"] = _APPENGINEHTTPREQUEST DESCRIPTOR.message_types_by_name["AppEngineRouting"] = _APPENGINEROUTING +DESCRIPTOR.message_types_by_name["OAuthToken"] = _OAUTHTOKEN +DESCRIPTOR.message_types_by_name["OidcToken"] = _OIDCTOKEN DESCRIPTOR.enum_types_by_name["HttpMethod"] = _HTTPMETHOD _sym_db.RegisterFileDescriptor(DESCRIPTOR) +HttpRequest = _reflection.GeneratedProtocolMessageType( + "HttpRequest", + (_message.Message,), + dict( + HeadersEntry=_reflection.GeneratedProtocolMessageType( + "HeadersEntry", + 
(_message.Message,), + dict( + DESCRIPTOR=_HTTPREQUEST_HEADERSENTRY, + __module__="google.cloud.tasks_v2.proto.target_pb2" + # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.HttpRequest.HeadersEntry) + ), + ), + DESCRIPTOR=_HTTPREQUEST, + __module__="google.cloud.tasks_v2.proto.target_pb2", + __doc__="""HTTP request. + + The task will be pushed to the worker as an HTTP request. If the worker + or the redirected worker acknowledges the task by returning a successful + HTTP response code ([``200`` - ``299``]), the task will removed from the + queue. If any other HTTP response code is returned or no response is + received, the task will be retried according to the following: + + - User-specified throttling: [retry + configuration][google.cloud.tasks.v2.Queue.retry\_config], [rate + limits][google.cloud.tasks.v2.Queue.rate\_limits], and the [queue's + state][google.cloud.tasks.v2.Queue.state]. + + - System throttling: To prevent the worker from overloading, Cloud + Tasks may temporarily reduce the queue's effective rate. + User-specified settings will not be changed. + + System throttling happens because: + + - Cloud Tasks backs off on all errors. Normally the backoff specified + in [rate limits][google.cloud.tasks.v2.Queue.rate\_limits] will be + used. But if the worker returns ``429`` (Too Many Requests), ``503`` + (Service Unavailable), or the rate of errors is high, Cloud Tasks + will use a higher backoff rate. The retry specified in the + ``Retry-After`` HTTP response header is considered. + + - To prevent traffic spikes and to smooth sudden large traffic spikes, + dispatches ramp up slowly when the queue is newly created or idle and + if large numbers of tasks suddenly become available to dispatch (due + to spikes in create task rates, the queue being unpaused, or many + tasks that are scheduled at the same time). + + + Attributes: + url: + Required. The full url path that the request will be sent to. 
+ This string must begin with either "http://" or "https://". + Some examples are: ``http://acme.com`` and + ``https://acme.com/sales:8080``. Cloud Tasks will encode some + characters for safety and compatibility. The maximum allowed + URL length is 2083 characters after encoding. The + ``Location`` header response from a redirect response [``300`` + - ``399``] may be followed. The redirect is not counted as a + separate attempt. + http_method: + The HTTP method to use for the request. The default is POST. + headers: + HTTP request headers. This map contains the header field + names and values. Headers can be set when the [task is + created][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. + These headers represent a subset of the headers that will + accompany the task's HTTP request. Some HTTP request headers + will be ignored or replaced. A partial list of headers that + will be ignored or replaced is: - Host: This will be + computed by Cloud Tasks and derived from + [HttpRequest.url][google.cloud.tasks.v2.HttpRequest.url]. - + Content-Length: This will be computed by Cloud Tasks. - User- + Agent: This will be set to ``"Google-Cloud-Tasks"``. - + X-Google-\*: Google use only. - X-AppEngine-\*: Google use + only. ``Content-Type`` won't be set by Cloud Tasks. You can + explicitly set ``Content-Type`` to a media type when the [task + is created][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. + For example, ``Content-Type`` can be set to + ``"application/octet-stream"`` or ``"application/json"``. + Headers which can have multiple values (according to RFC2616) + can be specified using comma-separated values. The size of + the headers must be less than 80KB. + body: + HTTP request body. A request body is allowed only if the + [HTTP method][google.cloud.tasks.v2.HttpRequest.http\_method] + is POST, PUT, or PATCH. It is an error to set body on a task + with an incompatible + [HttpMethod][google.cloud.tasks.v2.HttpMethod]. 
+ authorization_header: + The mode for generating an ``Authorization`` header for HTTP + requests. If specified, all ``Authorization`` headers in the + [HttpRequest.headers][google.cloud.tasks.v2.HttpRequest.header + s] field will be overridden. + oauth_token: + If specified, an `OAuth token + `_ + will be generated and attached as an ``Authorization`` header + in the HTTP request. This type of authorization should + generally only be used when calling Google APIs hosted on + \*.googleapis.com. + oidc_token: + If specified, an `OIDC `_ token will be generated and + attached as an ``Authorization`` header in the HTTP request. + This type of authorization can be used for many scenarios, + including calling Cloud Run, or endpoints where you intend to + validate the token yourself. + """, + # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.HttpRequest) + ), +) +_sym_db.RegisterMessage(HttpRequest) +_sym_db.RegisterMessage(HttpRequest.HeadersEntry) + AppEngineHttpRequest = _reflection.GeneratedProtocolMessageType( "AppEngineHttpRequest", (_message.Message,), @@ -605,7 +1050,67 @@ ) _sym_db.RegisterMessage(AppEngineRouting) +OAuthToken = _reflection.GeneratedProtocolMessageType( + "OAuthToken", + (_message.Message,), + dict( + DESCRIPTOR=_OAUTHTOKEN, + __module__="google.cloud.tasks_v2.proto.target_pb2", + __doc__="""Contains information needed for generating an `OAuth + token `_. This + type of authorization should generally only be used when calling Google + APIs hosted on \*.googleapis.com. + + + Attributes: + service_account_email: + `Service account email + `_ to be + used for generating OAuth token. The service account must be + within the same project as the queue. The caller must have + iam.serviceAccounts.actAs permission for the service account. + scope: + OAuth scope to be used for generating OAuth access token. If + not specified, "https://www.googleapis.com/auth/cloud- + platform" will be used. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.OAuthToken) + ), +) +_sym_db.RegisterMessage(OAuthToken) + +OidcToken = _reflection.GeneratedProtocolMessageType( + "OidcToken", + (_message.Message,), + dict( + DESCRIPTOR=_OIDCTOKEN, + __module__="google.cloud.tasks_v2.proto.target_pb2", + __doc__="""Contains information needed for generating an `OpenID Connect + token `_. + This type of authorization can be used for many scenarios, including + calling Cloud Run, or endpoints where you intend to validate the token + yourself. + + + Attributes: + service_account_email: + `Service account email + `_ to be + used for generating OIDC token. The service account must be + within the same project as the queue. The caller must have + iam.serviceAccounts.actAs permission for the service account. + audience: + Audience to be used when generating OIDC token. If not + specified, the URI specified in target will be used. + """, + # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.OidcToken) + ), +) +_sym_db.RegisterMessage(OidcToken) + DESCRIPTOR._options = None +_HTTPREQUEST_HEADERSENTRY._options = None +_HTTPREQUEST.fields_by_name["url"]._options = None _APPENGINEHTTPREQUEST_HEADERSENTRY._options = None # @@protoc_insertion_point(module_scope) diff --git a/tasks/google/cloud/tasks_v2/proto/task.proto b/tasks/google/cloud/tasks_v2/proto/task.proto index a555c2c19689..70a2baba4c53 100644 --- a/tasks/google/cloud/tasks_v2/proto/task.proto +++ b/tasks/google/cloud/tasks_v2/proto/task.proto @@ -93,6 +93,11 @@ message Task { // // An App Engine task is a task that has [AppEngineHttpRequest][google.cloud.tasks.v2.AppEngineHttpRequest] set. AppEngineHttpRequest app_engine_http_request = 2; + + // HTTP request that is sent to the worker. + // + // An HTTP task is a task that has [HttpRequest][google.cloud.tasks.v2.HttpRequest] set. + HttpRequest http_request = 3; } // The time when the task is scheduled to be attempted or retried. 
@@ -117,6 +122,8 @@ message Task { // // The default and maximum values depend on the type of request: // + // * For [HTTP tasks][google.cloud.tasks.v2.HttpRequest], the default is 10 minutes. The deadline + // must be in the interval [15 seconds, 30 minutes]. // // * For [App Engine tasks][google.cloud.tasks.v2.AppEngineHttpRequest], 0 indicates that the // request has the default deadline. The default deadline depends on the diff --git a/tasks/google/cloud/tasks_v2/proto/task_pb2.py b/tasks/google/cloud/tasks_v2/proto/task_pb2.py index 996e33c624ad..b8d6ffb30ff3 100644 --- a/tasks/google/cloud/tasks_v2/proto/task_pb2.py +++ b/tasks/google/cloud/tasks_v2/proto/task_pb2.py @@ -33,7 +33,7 @@ "\n\031com.google.cloud.tasks.v2B\tTaskProtoP\001Z:google.golang.org/genproto/googleapis/cloud/tasks/v2;tasks" ), serialized_pb=_b( - '\n&google/cloud/tasks_v2/proto/task.proto\x12\x15google.cloud.tasks.v2\x1a\x19google/api/resource.proto\x1a(google/cloud/tasks_v2/proto/target.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto\x1a\x1cgoogle/api/annotations.proto"\xf8\x04\n\x04Task\x12\x0c\n\x04name\x18\x01 \x01(\t\x12N\n\x17\x61pp_engine_http_request\x18\x02 \x01(\x0b\x32+.google.cloud.tasks.v2.AppEngineHttpRequestH\x00\x12\x31\n\rschedule_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0b\x63reate_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x34\n\x11\x64ispatch_deadline\x18\x06 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x16\n\x0e\x64ispatch_count\x18\x07 \x01(\x05\x12\x16\n\x0eresponse_count\x18\x08 \x01(\x05\x12\x35\n\rfirst_attempt\x18\t \x01(\x0b\x32\x1e.google.cloud.tasks.v2.Attempt\x12\x34\n\x0clast_attempt\x18\n \x01(\x0b\x32\x1e.google.cloud.tasks.v2.Attempt\x12.\n\x04view\x18\x0b \x01(\x0e\x32 
.google.cloud.tasks.v2.Task.View"1\n\x04View\x12\x14\n\x10VIEW_UNSPECIFIED\x10\x00\x12\t\n\x05\x42\x41SIC\x10\x01\x12\x08\n\x04\x46ULL\x10\x02:h\xea\x41\x65\n\x1e\x63loudtasks.googleapis.com/Task\x12\x43projects/{project}/locations/{location}/queues/{queue}/tasks/{task}B\x0e\n\x0cmessage_type"\xcf\x01\n\x07\x41ttempt\x12\x31\n\rschedule_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x31\n\rdispatch_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x31\n\rresponse_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12+\n\x0fresponse_status\x18\x04 \x01(\x0b\x32\x12.google.rpc.StatusBd\n\x19\x63om.google.cloud.tasks.v2B\tTaskProtoP\x01Z:google.golang.org/genproto/googleapis/cloud/tasks/v2;tasksb\x06proto3' + '\n&google/cloud/tasks_v2/proto/task.proto\x12\x15google.cloud.tasks.v2\x1a\x19google/api/resource.proto\x1a(google/cloud/tasks_v2/proto/target.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto\x1a\x1cgoogle/api/annotations.proto"\xb4\x05\n\x04Task\x12\x0c\n\x04name\x18\x01 \x01(\t\x12N\n\x17\x61pp_engine_http_request\x18\x02 \x01(\x0b\x32+.google.cloud.tasks.v2.AppEngineHttpRequestH\x00\x12:\n\x0chttp_request\x18\x03 \x01(\x0b\x32".google.cloud.tasks.v2.HttpRequestH\x00\x12\x31\n\rschedule_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0b\x63reate_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x34\n\x11\x64ispatch_deadline\x18\x06 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x16\n\x0e\x64ispatch_count\x18\x07 \x01(\x05\x12\x16\n\x0eresponse_count\x18\x08 \x01(\x05\x12\x35\n\rfirst_attempt\x18\t \x01(\x0b\x32\x1e.google.cloud.tasks.v2.Attempt\x12\x34\n\x0clast_attempt\x18\n \x01(\x0b\x32\x1e.google.cloud.tasks.v2.Attempt\x12.\n\x04view\x18\x0b \x01(\x0e\x32 
.google.cloud.tasks.v2.Task.View"1\n\x04View\x12\x14\n\x10VIEW_UNSPECIFIED\x10\x00\x12\t\n\x05\x42\x41SIC\x10\x01\x12\x08\n\x04\x46ULL\x10\x02:h\xea\x41\x65\n\x1e\x63loudtasks.googleapis.com/Task\x12\x43projects/{project}/locations/{location}/queues/{queue}/tasks/{task}B\x0e\n\x0cmessage_type"\xcf\x01\n\x07\x41ttempt\x12\x31\n\rschedule_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x31\n\rdispatch_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x31\n\rresponse_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12+\n\x0fresponse_status\x18\x04 \x01(\x0b\x32\x12.google.rpc.StatusBd\n\x19\x63om.google.cloud.tasks.v2B\tTaskProtoP\x01Z:google.golang.org/genproto/googleapis/cloud/tasks/v2;tasksb\x06proto3' ), dependencies=[ google_dot_api_dot_resource__pb2.DESCRIPTOR, @@ -68,8 +68,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=716, - serialized_end=765, + serialized_start=776, + serialized_end=825, ) _sym_db.RegisterEnumDescriptor(_TASK_VIEW) @@ -117,10 +117,28 @@ serialized_options=None, file=DESCRIPTOR, ), + _descriptor.FieldDescriptor( + name="http_request", + full_name="google.cloud.tasks.v2.Task.http_request", + index=2, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), _descriptor.FieldDescriptor( name="schedule_time", full_name="google.cloud.tasks.v2.Task.schedule_time", - index=2, + index=3, number=4, type=11, cpp_type=10, @@ -138,7 +156,7 @@ _descriptor.FieldDescriptor( name="create_time", full_name="google.cloud.tasks.v2.Task.create_time", - index=3, + index=4, number=5, type=11, cpp_type=10, @@ -156,7 +174,7 @@ _descriptor.FieldDescriptor( name="dispatch_deadline", full_name="google.cloud.tasks.v2.Task.dispatch_deadline", - index=4, + index=5, number=6, type=11, cpp_type=10, @@ -174,7 +192,7 @@ 
_descriptor.FieldDescriptor( name="dispatch_count", full_name="google.cloud.tasks.v2.Task.dispatch_count", - index=5, + index=6, number=7, type=5, cpp_type=1, @@ -192,7 +210,7 @@ _descriptor.FieldDescriptor( name="response_count", full_name="google.cloud.tasks.v2.Task.response_count", - index=6, + index=7, number=8, type=5, cpp_type=1, @@ -210,7 +228,7 @@ _descriptor.FieldDescriptor( name="first_attempt", full_name="google.cloud.tasks.v2.Task.first_attempt", - index=7, + index=8, number=9, type=11, cpp_type=10, @@ -228,7 +246,7 @@ _descriptor.FieldDescriptor( name="last_attempt", full_name="google.cloud.tasks.v2.Task.last_attempt", - index=8, + index=9, number=10, type=11, cpp_type=10, @@ -246,7 +264,7 @@ _descriptor.FieldDescriptor( name="view", full_name="google.cloud.tasks.v2.Task.view", - index=9, + index=10, number=11, type=14, cpp_type=8, @@ -281,7 +299,7 @@ ) ], serialized_start=255, - serialized_end=887, + serialized_end=947, ) @@ -373,8 +391,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=890, - serialized_end=1097, + serialized_start=950, + serialized_end=1157, ) _TASK.fields_by_name[ @@ -382,6 +400,9 @@ ].message_type = ( google_dot_cloud_dot_tasks__v2_dot_proto_dot_target__pb2._APPENGINEHTTPREQUEST ) +_TASK.fields_by_name[ + "http_request" +].message_type = google_dot_cloud_dot_tasks__v2_dot_proto_dot_target__pb2._HTTPREQUEST _TASK.fields_by_name[ "schedule_time" ].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP @@ -401,6 +422,10 @@ _TASK.fields_by_name["app_engine_http_request"].containing_oneof = _TASK.oneofs_by_name[ "message_type" ] +_TASK.oneofs_by_name["message_type"].fields.append(_TASK.fields_by_name["http_request"]) +_TASK.fields_by_name["http_request"].containing_oneof = _TASK.oneofs_by_name[ + "message_type" +] _ATTEMPT.fields_by_name[ "schedule_time" ].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP @@ -453,6 +478,10 @@ HTTP request that is sent to the App Engine app handler. 
An App Engine task is a task that has [AppEngineHttpRequest][goog le.cloud.tasks.v2.AppEngineHttpRequest] set. + http_request: + HTTP request that is sent to the worker. An HTTP task is a + task that has [HttpRequest][google.cloud.tasks.v2.HttpRequest] + set. schedule_time: The time when the task is scheduled to be attempted or retried. ``schedule_time`` will be truncated to the nearest @@ -470,7 +499,10 @@ for the response, but whether the worker stops processing depends on the worker. For example, if the worker is stuck, it may not react to cancelled requests. The default and maximum - values depend on the type of request: - For [App Engine + values depend on the type of request: - For [HTTP + tasks][google.cloud.tasks.v2.HttpRequest], the default is + 10 minutes. The deadline must be in the interval [15 seconds, + 30 minutes]. - For [App Engine tasks][google.cloud.tasks.v2.AppEngineHttpRequest], 0 indicates that the request has the default deadline. The default deadline depends on the `scaling type diff --git a/tasks/google/cloud/tasks_v2beta2/gapic/cloud_tasks_client.py b/tasks/google/cloud/tasks_v2beta2/gapic/cloud_tasks_client.py index cdfdf40b3511..651a67a0c228 100644 --- a/tasks/google/cloud/tasks_v2beta2/gapic/cloud_tasks_client.py +++ b/tasks/google/cloud/tasks_v2beta2/gapic/cloud_tasks_client.py @@ -287,13 +287,13 @@ def list_queues( streaming is performed per-page, this determines the maximum number of resources in a page. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. 
Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.api_core.page_iterator.PageIterator` instance. @@ -373,13 +373,13 @@ def get_queue( name (str): Required. The resource name of the queue. For example: ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2beta2.types.Queue` instance. @@ -466,13 +466,13 @@ def create_queue( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.tasks_v2beta2.types.Queue` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. 
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2beta2.types.Queue` instance. @@ -565,13 +565,13 @@ def update_queue( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.tasks_v2beta2.types.FieldMask` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2beta2.types.Queue` instance. @@ -648,13 +648,13 @@ def delete_queue( name (str): Required. The queue name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. 
+ that is provided to the client library method. Raises: google.api_core.exceptions.GoogleAPICallError: If the request @@ -720,13 +720,13 @@ def purge_queue( name (str): Required. The queue name. For example: ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2beta2.types.Queue` instance. @@ -794,13 +794,13 @@ def pause_queue( name (str): Required. The queue name. For example: ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2beta2.types.Queue` instance. 
@@ -873,13 +873,13 @@ def resume_queue( name (str): Required. The queue name. For example: ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2beta2.types.Queue` instance. @@ -956,13 +956,13 @@ def get_iam_policy( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.tasks_v2beta2.types.GetPolicyOptions` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2beta2.types.Policy` instance. 
@@ -1049,13 +1049,13 @@ def set_iam_policy( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.tasks_v2beta2.types.Policy` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2beta2.types.Policy` instance. @@ -1133,13 +1133,13 @@ def test_iam_permissions( information see `IAM Overview `__. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2beta2.types.TestIamPermissionsResponse` instance. @@ -1225,7 +1225,7 @@ def list_tasks( Args: parent (str): Required. The queue name. 
For example: ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - response_view (~google.cloud.tasks_v2beta2.types.View): The response\_view specifies which subset of the ``Task`` will be + response_view (~google.cloud.tasks_v2beta2.enums.Task.View): The response\_view specifies which subset of the ``Task`` will be returned. By default response\_view is ``BASIC``; not all information is retrieved @@ -1242,13 +1242,13 @@ def list_tasks( streaming is performed per-page, this determines the maximum number of resources in a page. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.api_core.page_iterator.PageIterator` instance. @@ -1328,7 +1328,7 @@ def get_task( Args: name (str): Required. The task name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - response_view (~google.cloud.tasks_v2beta2.types.View): The response\_view specifies which subset of the ``Task`` will be + response_view (~google.cloud.tasks_v2beta2.enums.Task.View): The response\_view specifies which subset of the ``Task`` will be returned. By default response\_view is ``BASIC``; not all information is retrieved @@ -1340,13 +1340,13 @@ def get_task( `Google IAM `___ permission on the ``Task`` resource. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. 
If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2beta2.types.Task` instance. @@ -1454,7 +1454,7 @@ def create_task( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.tasks_v2beta2.types.Task` - response_view (~google.cloud.tasks_v2beta2.types.View): The response\_view specifies which subset of the ``Task`` will be + response_view (~google.cloud.tasks_v2beta2.enums.Task.View): The response\_view specifies which subset of the ``Task`` will be returned. By default response\_view is ``BASIC``; not all information is retrieved @@ -1466,13 +1466,13 @@ def create_task( `Google IAM `___ permission on the ``Task`` resource. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. 
Returns: A :class:`~google.cloud.tasks_v2beta2.types.Task` instance. @@ -1542,13 +1542,13 @@ def delete_task( name (str): Required. The task name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Raises: google.api_core.exceptions.GoogleAPICallError: If the request @@ -1655,7 +1655,7 @@ def lease_tasks( The maximum total size of a ``lease tasks response`` is 32 MB. If the sum of all task sizes requested reaches this limit, fewer tasks than requested are returned. - response_view (~google.cloud.tasks_v2beta2.types.View): The response\_view specifies which subset of the ``Task`` will be + response_view (~google.cloud.tasks_v2beta2.enums.Task.View): The response\_view specifies which subset of the ``Task`` will be returned. By default response\_view is ``BASIC``; not all information is retrieved @@ -1693,13 +1693,13 @@ def lease_tasks( UTF-8 encoded can't be used in the ``filter`` and the task's ``tag`` will be displayed as empty in Cloud Tasks. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. 
timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2beta2.types.LeaseTasksResponse` instance. @@ -1788,13 +1788,13 @@ def acknowledge_task( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.tasks_v2beta2.types.Timestamp` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Raises: google.api_core.exceptions.GoogleAPICallError: If the request @@ -1883,7 +1883,7 @@ def renew_lease( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.tasks_v2beta2.types.Duration` - response_view (~google.cloud.tasks_v2beta2.types.View): The response\_view specifies which subset of the ``Task`` will be + response_view (~google.cloud.tasks_v2beta2.enums.Task.View): The response\_view specifies which subset of the ``Task`` will be returned. 
By default response\_view is ``BASIC``; not all information is retrieved @@ -1895,13 +1895,13 @@ def renew_lease( `Google IAM `___ permission on the ``Task`` resource. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2beta2.types.Task` instance. @@ -1985,7 +1985,7 @@ def cancel_lease( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.tasks_v2beta2.types.Timestamp` - response_view (~google.cloud.tasks_v2beta2.types.View): The response\_view specifies which subset of the ``Task`` will be + response_view (~google.cloud.tasks_v2beta2.enums.Task.View): The response\_view specifies which subset of the ``Task`` will be returned. By default response\_view is ``BASIC``; not all information is retrieved @@ -1997,13 +1997,13 @@ def cancel_lease( `Google IAM `___ permission on the ``Task`` resource. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. 
Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2beta2.types.Task` instance. @@ -2091,7 +2091,7 @@ def run_task( Args: name (str): Required. The task name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - response_view (~google.cloud.tasks_v2beta2.types.View): The response\_view specifies which subset of the ``Task`` will be + response_view (~google.cloud.tasks_v2beta2.enums.Task.View): The response\_view specifies which subset of the ``Task`` will be returned. By default response\_view is ``BASIC``; not all information is retrieved @@ -2103,13 +2103,13 @@ def run_task( `Google IAM `___ permission on the ``Task`` resource. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2beta2.types.Task` instance. 
diff --git a/tasks/google/cloud/tasks_v2beta2/proto/cloudtasks.proto b/tasks/google/cloud/tasks_v2beta2/proto/cloudtasks.proto index 79274fc37291..afea6919a625 100644 --- a/tasks/google/cloud/tasks_v2beta2/proto/cloudtasks.proto +++ b/tasks/google/cloud/tasks_v2beta2/proto/cloudtasks.proto @@ -396,7 +396,7 @@ message ListQueuesRequest { string parent = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { - child_type: "locations.googleapis.com/Location" + child_type: "cloudtasks.googleapis.com/Queue" } ]; @@ -473,7 +473,7 @@ message CreateQueueRequest { string parent = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { - child_type: "locations.googleapis.com/Location" + child_type: "cloudtasks.googleapis.com/Queue" } ]; @@ -555,7 +555,7 @@ message ListTasksRequest { string parent = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { - child_type: "cloudtasks.googleapis.com/Queue" + child_type: "cloudtasks.googleapis.com/Task" } ]; @@ -645,7 +645,7 @@ message CreateTaskRequest { string parent = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { - child_type: "cloudtasks.googleapis.com/Queue" + child_type: "cloudtasks.googleapis.com/Task" } ]; @@ -719,7 +719,7 @@ message LeaseTasksRequest { string parent = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { - child_type: "cloudtasks.googleapis.com/Queue" + child_type: "cloudtasks.googleapis.com/Task" } ]; diff --git a/tasks/google/cloud/tasks_v2beta2/proto/cloudtasks_pb2.py b/tasks/google/cloud/tasks_v2beta2/proto/cloudtasks_pb2.py index b071f420cb7f..7d5555815a35 100644 --- a/tasks/google/cloud/tasks_v2beta2/proto/cloudtasks_pb2.py +++ b/tasks/google/cloud/tasks_v2beta2/proto/cloudtasks_pb2.py @@ -41,7 +41,7 @@ "\n\036com.google.cloud.tasks.v2beta2B\017CloudTasksProtoP\001Z?google.golang.org/genproto/googleapis/cloud/tasks/v2beta2;tasks\242\002\005TASKS" ), serialized_pb=_b( - 
'\n1google/cloud/tasks_v2beta2/proto/cloudtasks.proto\x12\x1agoogle.cloud.tasks.v2beta2\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a,google/cloud/tasks_v2beta2/proto/queue.proto\x1a+google/cloud/tasks_v2beta2/proto/task.proto\x1a\x1egoogle/iam/v1/iam_policy.proto\x1a\x1agoogle/iam/v1/policy.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\x85\x01\n\x11ListQueuesRequest\x12\x39\n\x06parent\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\x12!locations.googleapis.com/Location\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t"`\n\x12ListQueuesResponse\x12\x31\n\x06queues\x18\x01 \x03(\x0b\x32!.google.cloud.tasks.v2beta2.Queue\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"H\n\x0fGetQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"\x86\x01\n\x12\x43reateQueueRequest\x12\x39\n\x06parent\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\x12!locations.googleapis.com/Location\x12\x35\n\x05queue\x18\x02 \x01(\x0b\x32!.google.cloud.tasks.v2beta2.QueueB\x03\xe0\x41\x02"|\n\x12UpdateQueueRequest\x12\x35\n\x05queue\x18\x01 \x01(\x0b\x32!.google.cloud.tasks.v2beta2.QueueB\x03\xe0\x41\x02\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"K\n\x12\x44\x65leteQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"J\n\x11PurgeQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"J\n\x11PauseQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"K\n\x12ResumeQueueRequest\x12\x35\n\x04name\x18\x01 
\x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"\xb0\x01\n\x10ListTasksRequest\x12\x37\n\x06parent\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\x12\x1f\x63loudtasks.googleapis.com/Queue\x12<\n\rresponse_view\x18\x02 \x01(\x0e\x32%.google.cloud.tasks.v2beta2.Task.View\x12\x11\n\tpage_size\x18\x04 \x01(\x05\x12\x12\n\npage_token\x18\x05 \x01(\t"]\n\x11ListTasksResponse\x12/\n\x05tasks\x18\x01 \x03(\x0b\x32 .google.cloud.tasks.v2beta2.Task\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\x84\x01\n\x0eGetTaskRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x63loudtasks.googleapis.com/Task\x12<\n\rresponse_view\x18\x02 \x01(\x0e\x32%.google.cloud.tasks.v2beta2.Task.View"\xbf\x01\n\x11\x43reateTaskRequest\x12\x37\n\x06parent\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\x12\x1f\x63loudtasks.googleapis.com/Queue\x12\x33\n\x04task\x18\x02 \x01(\x0b\x32 .google.cloud.tasks.v2beta2.TaskB\x03\xe0\x41\x02\x12<\n\rresponse_view\x18\x03 \x01(\x0e\x32%.google.cloud.tasks.v2beta2.Task.View"I\n\x11\x44\x65leteTaskRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x63loudtasks.googleapis.com/Task"\xe5\x01\n\x11LeaseTasksRequest\x12\x37\n\x06parent\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\x12\x1f\x63loudtasks.googleapis.com/Queue\x12\x11\n\tmax_tasks\x18\x02 \x01(\x05\x12\x36\n\x0elease_duration\x18\x03 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x02\x12<\n\rresponse_view\x18\x04 \x01(\x0e\x32%.google.cloud.tasks.v2beta2.Task.View\x12\x0e\n\x06\x66ilter\x18\x05 \x01(\t"E\n\x12LeaseTasksResponse\x12/\n\x05tasks\x18\x01 \x03(\x0b\x32 .google.cloud.tasks.v2beta2.Task"\x86\x01\n\x16\x41\x63knowledgeTaskRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x63loudtasks.googleapis.com/Task\x12\x36\n\rschedule_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x02"\xf7\x01\n\x11RenewLeaseRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 
\n\x1e\x63loudtasks.googleapis.com/Task\x12\x36\n\rschedule_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x02\x12\x36\n\x0elease_duration\x18\x03 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x02\x12<\n\rresponse_view\x18\x04 \x01(\x0e\x32%.google.cloud.tasks.v2beta2.Task.View"\xc0\x01\n\x12\x43\x61ncelLeaseRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x63loudtasks.googleapis.com/Task\x12\x36\n\rschedule_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x02\x12<\n\rresponse_view\x18\x03 \x01(\x0e\x32%.google.cloud.tasks.v2beta2.Task.View"\x84\x01\n\x0eRunTaskRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x63loudtasks.googleapis.com/Task\x12<\n\rresponse_view\x18\x02 \x01(\x0e\x32%.google.cloud.tasks.v2beta2.Task.View2\xd4\x1c\n\nCloudTasks\x12\xad\x01\n\nListQueues\x12-.google.cloud.tasks.v2beta2.ListQueuesRequest\x1a..google.cloud.tasks.v2beta2.ListQueuesResponse"@\x82\xd3\xe4\x93\x02\x31\x12//v2beta2/{parent=projects/*/locations/*}/queues\xda\x41\x06parent\x12\x9a\x01\n\x08GetQueue\x12+.google.cloud.tasks.v2beta2.GetQueueRequest\x1a!.google.cloud.tasks.v2beta2.Queue">\x82\xd3\xe4\x93\x02\x31\x12//v2beta2/{name=projects/*/locations/*/queues/*}\xda\x41\x04name\x12\xaf\x01\n\x0b\x43reateQueue\x12..google.cloud.tasks.v2beta2.CreateQueueRequest\x1a!.google.cloud.tasks.v2beta2.Queue"M\x82\xd3\xe4\x93\x02\x38"//v2beta2/{parent=projects/*/locations/*}/queues:\x05queue\xda\x41\x0cparent,queue\x12\xba\x01\n\x0bUpdateQueue\x12..google.cloud.tasks.v2beta2.UpdateQueueRequest\x1a!.google.cloud.tasks.v2beta2.Queue"X\x82\xd3\xe4\x93\x02>25/v2beta2/{queue.name=projects/*/locations/*/queues/*}:\x05queue\xda\x41\x11queue,update_mask\x12\x95\x01\n\x0b\x44\x65leteQueue\x12..google.cloud.tasks.v2beta2.DeleteQueueRequest\x1a\x16.google.protobuf.Empty">\x82\xd3\xe4\x93\x02\x31*//v2beta2/{name=projects/*/locations/*/queues/*}\xda\x41\x04name\x12\xa7\x01\n\nPurgeQueue\x12-.google.cloud.ta
sks.v2beta2.PurgeQueueRequest\x1a!.google.cloud.tasks.v2beta2.Queue"G\x82\xd3\xe4\x93\x02:"5/v2beta2/{name=projects/*/locations/*/queues/*}:purge:\x01*\xda\x41\x04name\x12\xa7\x01\n\nPauseQueue\x12-.google.cloud.tasks.v2beta2.PauseQueueRequest\x1a!.google.cloud.tasks.v2beta2.Queue"G\x82\xd3\xe4\x93\x02:"5/v2beta2/{name=projects/*/locations/*/queues/*}:pause:\x01*\xda\x41\x04name\x12\xaa\x01\n\x0bResumeQueue\x12..google.cloud.tasks.v2beta2.ResumeQueueRequest\x1a!.google.cloud.tasks.v2beta2.Queue"H\x82\xd3\xe4\x93\x02;"6/v2beta2/{name=projects/*/locations/*/queues/*}:resume:\x01*\xda\x41\x04name\x12\xa1\x01\n\x0cGetIamPolicy\x12".google.iam.v1.GetIamPolicyRequest\x1a\x15.google.iam.v1.Policy"V\x82\xd3\xe4\x93\x02\x45"@/v2beta2/{resource=projects/*/locations/*/queues/*}:getIamPolicy:\x01*\xda\x41\x08resource\x12\xa8\x01\n\x0cSetIamPolicy\x12".google.iam.v1.SetIamPolicyRequest\x1a\x15.google.iam.v1.Policy"]\x82\xd3\xe4\x93\x02\x45"@/v2beta2/{resource=projects/*/locations/*/queues/*}:setIamPolicy:\x01*\xda\x41\x0fresource,policy\x12\xd3\x01\n\x12TestIamPermissions\x12(.google.iam.v1.TestIamPermissionsRequest\x1a).google.iam.v1.TestIamPermissionsResponse"h\x82\xd3\xe4\x93\x02K"F/v2beta2/{resource=projects/*/locations/*/queues/*}:testIamPermissions:\x01*\xda\x41\x14resource,permissions\x12\xb2\x01\n\tListTasks\x12,.google.cloud.tasks.v2beta2.ListTasksRequest\x1a-.google.cloud.tasks.v2beta2.ListTasksResponse"H\x82\xd3\xe4\x93\x02\x39\x12\x37/v2beta2/{parent=projects/*/locations/*/queues/*}/tasks\xda\x41\x06parent\x12\x9f\x01\n\x07GetTask\x12*.google.cloud.tasks.v2beta2.GetTaskRequest\x1a .google.cloud.tasks.v2beta2.Task"F\x82\xd3\xe4\x93\x02\x39\x12\x37/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}\xda\x41\x04name\x12\xaf\x01\n\nCreateTask\x12-.google.cloud.tasks.v2beta2.CreateTaskRequest\x1a 
.google.cloud.tasks.v2beta2.Task"P\x82\xd3\xe4\x93\x02<"7/v2beta2/{parent=projects/*/locations/*/queues/*}/tasks:\x01*\xda\x41\x0bparent,task\x12\x9b\x01\n\nDeleteTask\x12-.google.cloud.tasks.v2beta2.DeleteTaskRequest\x1a\x16.google.protobuf.Empty"F\x82\xd3\xe4\x93\x02\x39*7/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}\xda\x41\x04name\x12\xcd\x01\n\nLeaseTasks\x12-.google.cloud.tasks.v2beta2.LeaseTasksRequest\x1a..google.cloud.tasks.v2beta2.LeaseTasksResponse"`\x82\xd3\xe4\x93\x02\x42"=/v2beta2/{parent=projects/*/locations/*/queues/*}/tasks:lease:\x01*\xda\x41\x15parent,lease_duration\x12\xc2\x01\n\x0f\x41\x63knowledgeTask\x12\x32.google.cloud.tasks.v2beta2.AcknowledgeTaskRequest\x1a\x16.google.protobuf.Empty"c\x82\xd3\xe4\x93\x02H"C/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}:acknowledge:\x01*\xda\x41\x12name,schedule_time\x12\xd0\x01\n\nRenewLease\x12-.google.cloud.tasks.v2beta2.RenewLeaseRequest\x1a .google.cloud.tasks.v2beta2.Task"q\x82\xd3\xe4\x93\x02G"B/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}:renewLease:\x01*\xda\x41!name,schedule_time,lease_duration\x12\xc4\x01\n\x0b\x43\x61ncelLease\x12..google.cloud.tasks.v2beta2.CancelLeaseRequest\x1a .google.cloud.tasks.v2beta2.Task"c\x82\xd3\xe4\x93\x02H"C/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}:cancelLease:\x01*\xda\x41\x12name,schedule_time\x12\xa6\x01\n\x07RunTask\x12*.google.cloud.tasks.v2beta2.RunTaskRequest\x1a .google.cloud.tasks.v2beta2.Task"M\x82\xd3\xe4\x93\x02@";/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}:run:\x01*\xda\x41\x04name\x1aM\xca\x41\x19\x63loudtasks.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB|\n\x1e\x63om.google.cloud.tasks.v2beta2B\x0f\x43loudTasksProtoP\x01Z?google.golang.org/genproto/googleapis/cloud/tasks/v2beta2;tasks\xa2\x02\x05TASKSb\x06proto3' + 
'\n1google/cloud/tasks_v2beta2/proto/cloudtasks.proto\x12\x1agoogle.cloud.tasks.v2beta2\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a,google/cloud/tasks_v2beta2/proto/queue.proto\x1a+google/cloud/tasks_v2beta2/proto/task.proto\x1a\x1egoogle/iam/v1/iam_policy.proto\x1a\x1agoogle/iam/v1/policy.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\x83\x01\n\x11ListQueuesRequest\x12\x37\n\x06parent\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\x12\x1f\x63loudtasks.googleapis.com/Queue\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t"`\n\x12ListQueuesResponse\x12\x31\n\x06queues\x18\x01 \x03(\x0b\x32!.google.cloud.tasks.v2beta2.Queue\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"H\n\x0fGetQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"\x84\x01\n\x12\x43reateQueueRequest\x12\x37\n\x06parent\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\x12\x1f\x63loudtasks.googleapis.com/Queue\x12\x35\n\x05queue\x18\x02 \x01(\x0b\x32!.google.cloud.tasks.v2beta2.QueueB\x03\xe0\x41\x02"|\n\x12UpdateQueueRequest\x12\x35\n\x05queue\x18\x01 \x01(\x0b\x32!.google.cloud.tasks.v2beta2.QueueB\x03\xe0\x41\x02\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"K\n\x12\x44\x65leteQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"J\n\x11PurgeQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"J\n\x11PauseQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"K\n\x12ResumeQueueRequest\x12\x35\n\x04name\x18\x01 
\x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"\xaf\x01\n\x10ListTasksRequest\x12\x36\n\x06parent\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \x12\x1e\x63loudtasks.googleapis.com/Task\x12<\n\rresponse_view\x18\x02 \x01(\x0e\x32%.google.cloud.tasks.v2beta2.Task.View\x12\x11\n\tpage_size\x18\x04 \x01(\x05\x12\x12\n\npage_token\x18\x05 \x01(\t"]\n\x11ListTasksResponse\x12/\n\x05tasks\x18\x01 \x03(\x0b\x32 .google.cloud.tasks.v2beta2.Task\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\x84\x01\n\x0eGetTaskRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x63loudtasks.googleapis.com/Task\x12<\n\rresponse_view\x18\x02 \x01(\x0e\x32%.google.cloud.tasks.v2beta2.Task.View"\xbe\x01\n\x11\x43reateTaskRequest\x12\x36\n\x06parent\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \x12\x1e\x63loudtasks.googleapis.com/Task\x12\x33\n\x04task\x18\x02 \x01(\x0b\x32 .google.cloud.tasks.v2beta2.TaskB\x03\xe0\x41\x02\x12<\n\rresponse_view\x18\x03 \x01(\x0e\x32%.google.cloud.tasks.v2beta2.Task.View"I\n\x11\x44\x65leteTaskRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x63loudtasks.googleapis.com/Task"\xe4\x01\n\x11LeaseTasksRequest\x12\x36\n\x06parent\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \x12\x1e\x63loudtasks.googleapis.com/Task\x12\x11\n\tmax_tasks\x18\x02 \x01(\x05\x12\x36\n\x0elease_duration\x18\x03 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x02\x12<\n\rresponse_view\x18\x04 \x01(\x0e\x32%.google.cloud.tasks.v2beta2.Task.View\x12\x0e\n\x06\x66ilter\x18\x05 \x01(\t"E\n\x12LeaseTasksResponse\x12/\n\x05tasks\x18\x01 \x03(\x0b\x32 .google.cloud.tasks.v2beta2.Task"\x86\x01\n\x16\x41\x63knowledgeTaskRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x63loudtasks.googleapis.com/Task\x12\x36\n\rschedule_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x02"\xf7\x01\n\x11RenewLeaseRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 
\n\x1e\x63loudtasks.googleapis.com/Task\x12\x36\n\rschedule_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x02\x12\x36\n\x0elease_duration\x18\x03 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x02\x12<\n\rresponse_view\x18\x04 \x01(\x0e\x32%.google.cloud.tasks.v2beta2.Task.View"\xc0\x01\n\x12\x43\x61ncelLeaseRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x63loudtasks.googleapis.com/Task\x12\x36\n\rschedule_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x02\x12<\n\rresponse_view\x18\x03 \x01(\x0e\x32%.google.cloud.tasks.v2beta2.Task.View"\x84\x01\n\x0eRunTaskRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x63loudtasks.googleapis.com/Task\x12<\n\rresponse_view\x18\x02 \x01(\x0e\x32%.google.cloud.tasks.v2beta2.Task.View2\xd4\x1c\n\nCloudTasks\x12\xad\x01\n\nListQueues\x12-.google.cloud.tasks.v2beta2.ListQueuesRequest\x1a..google.cloud.tasks.v2beta2.ListQueuesResponse"@\x82\xd3\xe4\x93\x02\x31\x12//v2beta2/{parent=projects/*/locations/*}/queues\xda\x41\x06parent\x12\x9a\x01\n\x08GetQueue\x12+.google.cloud.tasks.v2beta2.GetQueueRequest\x1a!.google.cloud.tasks.v2beta2.Queue">\x82\xd3\xe4\x93\x02\x31\x12//v2beta2/{name=projects/*/locations/*/queues/*}\xda\x41\x04name\x12\xaf\x01\n\x0b\x43reateQueue\x12..google.cloud.tasks.v2beta2.CreateQueueRequest\x1a!.google.cloud.tasks.v2beta2.Queue"M\x82\xd3\xe4\x93\x02\x38"//v2beta2/{parent=projects/*/locations/*}/queues:\x05queue\xda\x41\x0cparent,queue\x12\xba\x01\n\x0bUpdateQueue\x12..google.cloud.tasks.v2beta2.UpdateQueueRequest\x1a!.google.cloud.tasks.v2beta2.Queue"X\x82\xd3\xe4\x93\x02>25/v2beta2/{queue.name=projects/*/locations/*/queues/*}:\x05queue\xda\x41\x11queue,update_mask\x12\x95\x01\n\x0b\x44\x65leteQueue\x12..google.cloud.tasks.v2beta2.DeleteQueueRequest\x1a\x16.google.protobuf.Empty">\x82\xd3\xe4\x93\x02\x31*//v2beta2/{name=projects/*/locations/*/queues/*}\xda\x41\x04name\x12\xa7\x01\n\nPurgeQueue\x12-.google.cloud.ta
sks.v2beta2.PurgeQueueRequest\x1a!.google.cloud.tasks.v2beta2.Queue"G\x82\xd3\xe4\x93\x02:"5/v2beta2/{name=projects/*/locations/*/queues/*}:purge:\x01*\xda\x41\x04name\x12\xa7\x01\n\nPauseQueue\x12-.google.cloud.tasks.v2beta2.PauseQueueRequest\x1a!.google.cloud.tasks.v2beta2.Queue"G\x82\xd3\xe4\x93\x02:"5/v2beta2/{name=projects/*/locations/*/queues/*}:pause:\x01*\xda\x41\x04name\x12\xaa\x01\n\x0bResumeQueue\x12..google.cloud.tasks.v2beta2.ResumeQueueRequest\x1a!.google.cloud.tasks.v2beta2.Queue"H\x82\xd3\xe4\x93\x02;"6/v2beta2/{name=projects/*/locations/*/queues/*}:resume:\x01*\xda\x41\x04name\x12\xa1\x01\n\x0cGetIamPolicy\x12".google.iam.v1.GetIamPolicyRequest\x1a\x15.google.iam.v1.Policy"V\x82\xd3\xe4\x93\x02\x45"@/v2beta2/{resource=projects/*/locations/*/queues/*}:getIamPolicy:\x01*\xda\x41\x08resource\x12\xa8\x01\n\x0cSetIamPolicy\x12".google.iam.v1.SetIamPolicyRequest\x1a\x15.google.iam.v1.Policy"]\x82\xd3\xe4\x93\x02\x45"@/v2beta2/{resource=projects/*/locations/*/queues/*}:setIamPolicy:\x01*\xda\x41\x0fresource,policy\x12\xd3\x01\n\x12TestIamPermissions\x12(.google.iam.v1.TestIamPermissionsRequest\x1a).google.iam.v1.TestIamPermissionsResponse"h\x82\xd3\xe4\x93\x02K"F/v2beta2/{resource=projects/*/locations/*/queues/*}:testIamPermissions:\x01*\xda\x41\x14resource,permissions\x12\xb2\x01\n\tListTasks\x12,.google.cloud.tasks.v2beta2.ListTasksRequest\x1a-.google.cloud.tasks.v2beta2.ListTasksResponse"H\x82\xd3\xe4\x93\x02\x39\x12\x37/v2beta2/{parent=projects/*/locations/*/queues/*}/tasks\xda\x41\x06parent\x12\x9f\x01\n\x07GetTask\x12*.google.cloud.tasks.v2beta2.GetTaskRequest\x1a .google.cloud.tasks.v2beta2.Task"F\x82\xd3\xe4\x93\x02\x39\x12\x37/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}\xda\x41\x04name\x12\xaf\x01\n\nCreateTask\x12-.google.cloud.tasks.v2beta2.CreateTaskRequest\x1a 
.google.cloud.tasks.v2beta2.Task"P\x82\xd3\xe4\x93\x02<"7/v2beta2/{parent=projects/*/locations/*/queues/*}/tasks:\x01*\xda\x41\x0bparent,task\x12\x9b\x01\n\nDeleteTask\x12-.google.cloud.tasks.v2beta2.DeleteTaskRequest\x1a\x16.google.protobuf.Empty"F\x82\xd3\xe4\x93\x02\x39*7/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}\xda\x41\x04name\x12\xcd\x01\n\nLeaseTasks\x12-.google.cloud.tasks.v2beta2.LeaseTasksRequest\x1a..google.cloud.tasks.v2beta2.LeaseTasksResponse"`\x82\xd3\xe4\x93\x02\x42"=/v2beta2/{parent=projects/*/locations/*/queues/*}/tasks:lease:\x01*\xda\x41\x15parent,lease_duration\x12\xc2\x01\n\x0f\x41\x63knowledgeTask\x12\x32.google.cloud.tasks.v2beta2.AcknowledgeTaskRequest\x1a\x16.google.protobuf.Empty"c\x82\xd3\xe4\x93\x02H"C/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}:acknowledge:\x01*\xda\x41\x12name,schedule_time\x12\xd0\x01\n\nRenewLease\x12-.google.cloud.tasks.v2beta2.RenewLeaseRequest\x1a .google.cloud.tasks.v2beta2.Task"q\x82\xd3\xe4\x93\x02G"B/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}:renewLease:\x01*\xda\x41!name,schedule_time,lease_duration\x12\xc4\x01\n\x0b\x43\x61ncelLease\x12..google.cloud.tasks.v2beta2.CancelLeaseRequest\x1a .google.cloud.tasks.v2beta2.Task"c\x82\xd3\xe4\x93\x02H"C/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}:cancelLease:\x01*\xda\x41\x12name,schedule_time\x12\xa6\x01\n\x07RunTask\x12*.google.cloud.tasks.v2beta2.RunTaskRequest\x1a .google.cloud.tasks.v2beta2.Task"M\x82\xd3\xe4\x93\x02@";/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}:run:\x01*\xda\x41\x04name\x1aM\xca\x41\x19\x63loudtasks.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB|\n\x1e\x63om.google.cloud.tasks.v2beta2B\x0f\x43loudTasksProtoP\x01Z?google.golang.org/genproto/googleapis/cloud/tasks/v2beta2;tasks\xa2\x02\x05TASKSb\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, @@ -83,7 +83,7 @@ is_extension=False, extension_scope=None, serialized_options=_b( - 
"\340A\002\372A#\022!locations.googleapis.com/Location" + "\340A\002\372A!\022\037cloudtasks.googleapis.com/Queue" ), file=DESCRIPTOR, ), @@ -151,7 +151,7 @@ extension_ranges=[], oneofs=[], serialized_start=476, - serialized_end=609, + serialized_end=607, ) @@ -207,8 +207,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=611, - serialized_end=707, + serialized_start=609, + serialized_end=705, ) @@ -248,8 +248,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=709, - serialized_end=781, + serialized_start=707, + serialized_end=779, ) @@ -276,7 +276,7 @@ is_extension=False, extension_scope=None, serialized_options=_b( - "\340A\002\372A#\022!locations.googleapis.com/Location" + "\340A\002\372A!\022\037cloudtasks.googleapis.com/Queue" ), file=DESCRIPTOR, ), @@ -307,8 +307,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=784, - serialized_end=918, + serialized_start=782, + serialized_end=914, ) @@ -364,8 +364,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=920, - serialized_end=1044, + serialized_start=916, + serialized_end=1040, ) @@ -405,8 +405,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1046, - serialized_end=1121, + serialized_start=1042, + serialized_end=1117, ) @@ -446,8 +446,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1123, - serialized_end=1197, + serialized_start=1119, + serialized_end=1193, ) @@ -487,8 +487,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1199, - serialized_end=1273, + serialized_start=1195, + serialized_end=1269, ) @@ -528,8 +528,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1275, - serialized_end=1350, + serialized_start=1271, + serialized_end=1346, ) @@ -556,7 +556,7 @@ is_extension=False, extension_scope=None, serialized_options=_b( - "\340A\002\372A!\022\037cloudtasks.googleapis.com/Queue" + "\340A\002\372A 
\022\036cloudtasks.googleapis.com/Task" ), file=DESCRIPTOR, ), @@ -623,8 +623,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1353, - serialized_end=1529, + serialized_start=1349, + serialized_end=1524, ) @@ -680,8 +680,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1531, - serialized_end=1624, + serialized_start=1526, + serialized_end=1619, ) @@ -739,8 +739,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1627, - serialized_end=1759, + serialized_start=1622, + serialized_end=1754, ) @@ -767,7 +767,7 @@ is_extension=False, extension_scope=None, serialized_options=_b( - "\340A\002\372A!\022\037cloudtasks.googleapis.com/Queue" + "\340A\002\372A \022\036cloudtasks.googleapis.com/Task" ), file=DESCRIPTOR, ), @@ -816,8 +816,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1762, - serialized_end=1953, + serialized_start=1757, + serialized_end=1947, ) @@ -857,8 +857,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1955, - serialized_end=2028, + serialized_start=1949, + serialized_end=2022, ) @@ -885,7 +885,7 @@ is_extension=False, extension_scope=None, serialized_options=_b( - "\340A\002\372A!\022\037cloudtasks.googleapis.com/Queue" + "\340A\002\372A \022\036cloudtasks.googleapis.com/Task" ), file=DESCRIPTOR, ), @@ -970,8 +970,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2031, - serialized_end=2260, + serialized_start=2025, + serialized_end=2253, ) @@ -1009,8 +1009,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2262, - serialized_end=2331, + serialized_start=2255, + serialized_end=2324, ) @@ -1068,8 +1068,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2334, - serialized_end=2468, + serialized_start=2327, + serialized_end=2461, ) @@ -1163,8 +1163,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2471, - serialized_end=2718, + serialized_start=2464, + 
serialized_end=2711, ) @@ -1240,8 +1240,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2721, - serialized_end=2913, + serialized_start=2714, + serialized_end=2906, ) @@ -1299,8 +1299,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2916, - serialized_end=3048, + serialized_start=2909, + serialized_end=3041, ) _LISTQUEUESRESPONSE.fields_by_name[ @@ -2092,8 +2092,8 @@ serialized_options=_b( "\312A\031cloudtasks.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" ), - serialized_start=3051, - serialized_end=6719, + serialized_start=3044, + serialized_end=6712, methods=[ _descriptor.MethodDescriptor( name="ListQueues", diff --git a/tasks/google/cloud/tasks_v2beta3/gapic/cloud_tasks_client.py b/tasks/google/cloud/tasks_v2beta3/gapic/cloud_tasks_client.py index 33a080ad7459..b018df9b510e 100644 --- a/tasks/google/cloud/tasks_v2beta3/gapic/cloud_tasks_client.py +++ b/tasks/google/cloud/tasks_v2beta3/gapic/cloud_tasks_client.py @@ -285,13 +285,13 @@ def list_queues( streaming is performed per-page, this determines the maximum number of resources in a page. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.api_core.page_iterator.PageIterator` instance. @@ -371,13 +371,13 @@ def get_queue( name (str): Required. The resource name of the queue. 
For example: ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2beta3.types.Queue` instance. @@ -473,13 +473,13 @@ def create_queue( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.tasks_v2beta3.types.Queue` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2beta3.types.Queue` instance. 
@@ -581,13 +581,13 @@ def update_queue( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.tasks_v2beta3.types.FieldMask` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2beta3.types.Queue` instance. @@ -664,13 +664,13 @@ def delete_queue( name (str): Required. The queue name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Raises: google.api_core.exceptions.GoogleAPICallError: If the request @@ -736,13 +736,13 @@ def purge_queue( name (str): Required. The queue name. 
For example: ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2beta3.types.Queue` instance. @@ -810,13 +810,13 @@ def pause_queue( name (str): Required. The queue name. For example: ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2beta3.types.Queue` instance. @@ -889,13 +889,13 @@ def resume_queue( name (str): Required. The queue name. 
For example: ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2beta3.types.Queue` instance. @@ -972,13 +972,13 @@ def get_iam_policy( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.tasks_v2beta3.types.GetPolicyOptions` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2beta3.types.Policy` instance. 
@@ -1065,13 +1065,13 @@ def set_iam_policy( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.tasks_v2beta3.types.Policy` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2beta3.types.Policy` instance. @@ -1149,13 +1149,13 @@ def test_iam_permissions( information see `IAM Overview `__. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2beta3.types.TestIamPermissionsResponse` instance. @@ -1241,7 +1241,7 @@ def list_tasks( Args: parent (str): Required. The queue name. 
For example: ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` - response_view (~google.cloud.tasks_v2beta3.types.View): The response\_view specifies which subset of the ``Task`` will be + response_view (~google.cloud.tasks_v2beta3.enums.Task.View): The response\_view specifies which subset of the ``Task`` will be returned. By default response\_view is ``BASIC``; not all information is retrieved @@ -1258,13 +1258,13 @@ def list_tasks( streaming is performed per-page, this determines the maximum number of resources in a page. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.api_core.page_iterator.PageIterator` instance. @@ -1344,7 +1344,7 @@ def get_task( Args: name (str): Required. The task name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - response_view (~google.cloud.tasks_v2beta3.types.View): The response\_view specifies which subset of the ``Task`` will be + response_view (~google.cloud.tasks_v2beta3.enums.Task.View): The response\_view specifies which subset of the ``Task`` will be returned. By default response\_view is ``BASIC``; not all information is retrieved @@ -1356,13 +1356,13 @@ def get_task( `Google IAM `___ permission on the ``Task`` resource. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. 
If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2beta3.types.Task` instance. @@ -1469,7 +1469,7 @@ def create_task( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.tasks_v2beta3.types.Task` - response_view (~google.cloud.tasks_v2beta3.types.View): The response\_view specifies which subset of the ``Task`` will be + response_view (~google.cloud.tasks_v2beta3.enums.Task.View): The response\_view specifies which subset of the ``Task`` will be returned. By default response\_view is ``BASIC``; not all information is retrieved @@ -1481,13 +1481,13 @@ def create_task( `Google IAM `___ permission on the ``Task`` resource. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. 
Returns: A :class:`~google.cloud.tasks_v2beta3.types.Task` instance. @@ -1557,13 +1557,13 @@ def delete_task( name (str): Required. The task name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Raises: google.api_core.exceptions.GoogleAPICallError: If the request @@ -1644,7 +1644,7 @@ def run_task( Args: name (str): Required. The task name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` - response_view (~google.cloud.tasks_v2beta3.types.View): The response\_view specifies which subset of the ``Task`` will be + response_view (~google.cloud.tasks_v2beta3.enums.Task.View): The response\_view specifies which subset of the ``Task`` will be returned. By default response\_view is ``BASIC``; not all information is retrieved @@ -1656,13 +1656,13 @@ def run_task( `Google IAM `___ permission on the ``Task`` resource. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. 
Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2beta3.types.Task` instance. diff --git a/tasks/google/cloud/tasks_v2beta3/proto/cloudtasks.proto b/tasks/google/cloud/tasks_v2beta3/proto/cloudtasks.proto index 431d5a73fc7a..711c7a616674 100644 --- a/tasks/google/cloud/tasks_v2beta3/proto/cloudtasks.proto +++ b/tasks/google/cloud/tasks_v2beta3/proto/cloudtasks.proto @@ -311,7 +311,7 @@ message ListQueuesRequest { string parent = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { - child_type: "locations.googleapis.com/Location" + child_type: "cloudtasks.googleapis.com/Queue" } ]; @@ -388,7 +388,7 @@ message CreateQueueRequest { string parent = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { - child_type: "locations.googleapis.com/Location" + child_type: "cloudtasks.googleapis.com/Queue" } ]; @@ -470,7 +470,7 @@ message ListTasksRequest { string parent = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { - child_type: "cloudtasks.googleapis.com/Queue" + child_type: "cloudtasks.googleapis.com/Task" } ]; @@ -560,7 +560,7 @@ message CreateTaskRequest { string parent = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { - child_type: "cloudtasks.googleapis.com/Queue" + child_type: "cloudtasks.googleapis.com/Task" } ]; diff --git a/tasks/google/cloud/tasks_v2beta3/proto/cloudtasks_pb2.py b/tasks/google/cloud/tasks_v2beta3/proto/cloudtasks_pb2.py index 45decbad7bfb..051f5a6ef8c9 100644 --- a/tasks/google/cloud/tasks_v2beta3/proto/cloudtasks_pb2.py +++ b/tasks/google/cloud/tasks_v2beta3/proto/cloudtasks_pb2.py @@ -39,7 +39,7 @@ 
"\n\036com.google.cloud.tasks.v2beta3B\017CloudTasksProtoP\001Z?google.golang.org/genproto/googleapis/cloud/tasks/v2beta3;tasks\242\002\005TASKS" ), serialized_pb=_b( - '\n1google/cloud/tasks_v2beta3/proto/cloudtasks.proto\x12\x1agoogle.cloud.tasks.v2beta3\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a,google/cloud/tasks_v2beta3/proto/queue.proto\x1a+google/cloud/tasks_v2beta3/proto/task.proto\x1a\x1egoogle/iam/v1/iam_policy.proto\x1a\x1agoogle/iam/v1/policy.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto"\x85\x01\n\x11ListQueuesRequest\x12\x39\n\x06parent\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\x12!locations.googleapis.com/Location\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t"`\n\x12ListQueuesResponse\x12\x31\n\x06queues\x18\x01 \x03(\x0b\x32!.google.cloud.tasks.v2beta3.Queue\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"H\n\x0fGetQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"\x86\x01\n\x12\x43reateQueueRequest\x12\x39\n\x06parent\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\x12!locations.googleapis.com/Location\x12\x35\n\x05queue\x18\x02 \x01(\x0b\x32!.google.cloud.tasks.v2beta3.QueueB\x03\xe0\x41\x02"|\n\x12UpdateQueueRequest\x12\x35\n\x05queue\x18\x01 \x01(\x0b\x32!.google.cloud.tasks.v2beta3.QueueB\x03\xe0\x41\x02\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"K\n\x12\x44\x65leteQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"J\n\x11PurgeQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"J\n\x11PauseQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"K\n\x12ResumeQueueRequest\x12\x35\n\x04name\x18\x01 
\x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"\xb0\x01\n\x10ListTasksRequest\x12\x37\n\x06parent\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\x12\x1f\x63loudtasks.googleapis.com/Queue\x12<\n\rresponse_view\x18\x02 \x01(\x0e\x32%.google.cloud.tasks.v2beta3.Task.View\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t"]\n\x11ListTasksResponse\x12/\n\x05tasks\x18\x01 \x03(\x0b\x32 .google.cloud.tasks.v2beta3.Task\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\x84\x01\n\x0eGetTaskRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x63loudtasks.googleapis.com/Task\x12<\n\rresponse_view\x18\x02 \x01(\x0e\x32%.google.cloud.tasks.v2beta3.Task.View"\xbf\x01\n\x11\x43reateTaskRequest\x12\x37\n\x06parent\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\x12\x1f\x63loudtasks.googleapis.com/Queue\x12\x33\n\x04task\x18\x02 \x01(\x0b\x32 .google.cloud.tasks.v2beta3.TaskB\x03\xe0\x41\x02\x12<\n\rresponse_view\x18\x03 \x01(\x0e\x32%.google.cloud.tasks.v2beta3.Task.View"I\n\x11\x44\x65leteTaskRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x63loudtasks.googleapis.com/Task"\x84\x01\n\x0eRunTaskRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x63loudtasks.googleapis.com/Task\x12<\n\rresponse_view\x18\x02 
\x01(\x0e\x32%.google.cloud.tasks.v2beta3.Task.View2\xa5\x16\n\nCloudTasks\x12\xad\x01\n\nListQueues\x12-.google.cloud.tasks.v2beta3.ListQueuesRequest\x1a..google.cloud.tasks.v2beta3.ListQueuesResponse"@\x82\xd3\xe4\x93\x02\x31\x12//v2beta3/{parent=projects/*/locations/*}/queues\xda\x41\x06parent\x12\x9a\x01\n\x08GetQueue\x12+.google.cloud.tasks.v2beta3.GetQueueRequest\x1a!.google.cloud.tasks.v2beta3.Queue">\x82\xd3\xe4\x93\x02\x31\x12//v2beta3/{name=projects/*/locations/*/queues/*}\xda\x41\x04name\x12\xaf\x01\n\x0b\x43reateQueue\x12..google.cloud.tasks.v2beta3.CreateQueueRequest\x1a!.google.cloud.tasks.v2beta3.Queue"M\x82\xd3\xe4\x93\x02\x38"//v2beta3/{parent=projects/*/locations/*}/queues:\x05queue\xda\x41\x0cparent,queue\x12\xba\x01\n\x0bUpdateQueue\x12..google.cloud.tasks.v2beta3.UpdateQueueRequest\x1a!.google.cloud.tasks.v2beta3.Queue"X\x82\xd3\xe4\x93\x02>25/v2beta3/{queue.name=projects/*/locations/*/queues/*}:\x05queue\xda\x41\x11queue,update_mask\x12\x95\x01\n\x0b\x44\x65leteQueue\x12..google.cloud.tasks.v2beta3.DeleteQueueRequest\x1a\x16.google.protobuf.Empty">\x82\xd3\xe4\x93\x02\x31*//v2beta3/{name=projects/*/locations/*/queues/*}\xda\x41\x04name\x12\xa7\x01\n\nPurgeQueue\x12-.google.cloud.tasks.v2beta3.PurgeQueueRequest\x1a!.google.cloud.tasks.v2beta3.Queue"G\x82\xd3\xe4\x93\x02:"5/v2beta3/{name=projects/*/locations/*/queues/*}:purge:\x01*\xda\x41\x04name\x12\xa7\x01\n\nPauseQueue\x12-.google.cloud.tasks.v2beta3.PauseQueueRequest\x1a!.google.cloud.tasks.v2beta3.Queue"G\x82\xd3\xe4\x93\x02:"5/v2beta3/{name=projects/*/locations/*/queues/*}:pause:\x01*\xda\x41\x04name\x12\xaa\x01\n\x0bResumeQueue\x12..google.cloud.tasks.v2beta3.ResumeQueueRequest\x1a!.google.cloud.tasks.v2beta3.Queue"H\x82\xd3\xe4\x93\x02;"6/v2beta3/{name=projects/*/locations/*/queues/*}:resume:\x01*\xda\x41\x04name\x12\xa1\x01\n\x0cGetIamPolicy\x12".google.iam.v1.GetIamPolicyRequest\x1a\x15.google.iam.v1.Policy"V\x82\xd3\xe4\x93\x02\x45"@/v2beta3/{resource=projects/*/locations/*/queues/*}:
getIamPolicy:\x01*\xda\x41\x08resource\x12\xa8\x01\n\x0cSetIamPolicy\x12".google.iam.v1.SetIamPolicyRequest\x1a\x15.google.iam.v1.Policy"]\x82\xd3\xe4\x93\x02\x45"@/v2beta3/{resource=projects/*/locations/*/queues/*}:setIamPolicy:\x01*\xda\x41\x0fresource,policy\x12\xd3\x01\n\x12TestIamPermissions\x12(.google.iam.v1.TestIamPermissionsRequest\x1a).google.iam.v1.TestIamPermissionsResponse"h\x82\xd3\xe4\x93\x02K"F/v2beta3/{resource=projects/*/locations/*/queues/*}:testIamPermissions:\x01*\xda\x41\x14resource,permissions\x12\xb2\x01\n\tListTasks\x12,.google.cloud.tasks.v2beta3.ListTasksRequest\x1a-.google.cloud.tasks.v2beta3.ListTasksResponse"H\x82\xd3\xe4\x93\x02\x39\x12\x37/v2beta3/{parent=projects/*/locations/*/queues/*}/tasks\xda\x41\x06parent\x12\x9f\x01\n\x07GetTask\x12*.google.cloud.tasks.v2beta3.GetTaskRequest\x1a .google.cloud.tasks.v2beta3.Task"F\x82\xd3\xe4\x93\x02\x39\x12\x37/v2beta3/{name=projects/*/locations/*/queues/*/tasks/*}\xda\x41\x04name\x12\xaf\x01\n\nCreateTask\x12-.google.cloud.tasks.v2beta3.CreateTaskRequest\x1a .google.cloud.tasks.v2beta3.Task"P\x82\xd3\xe4\x93\x02<"7/v2beta3/{parent=projects/*/locations/*/queues/*}/tasks:\x01*\xda\x41\x0bparent,task\x12\x9b\x01\n\nDeleteTask\x12-.google.cloud.tasks.v2beta3.DeleteTaskRequest\x1a\x16.google.protobuf.Empty"F\x82\xd3\xe4\x93\x02\x39*7/v2beta3/{name=projects/*/locations/*/queues/*/tasks/*}\xda\x41\x04name\x12\xa6\x01\n\x07RunTask\x12*.google.cloud.tasks.v2beta3.RunTaskRequest\x1a .google.cloud.tasks.v2beta3.Task"M\x82\xd3\xe4\x93\x02@";/v2beta3/{name=projects/*/locations/*/queues/*/tasks/*}:run:\x01*\xda\x41\x04name\x1aM\xca\x41\x19\x63loudtasks.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB|\n\x1e\x63om.google.cloud.tasks.v2beta3B\x0f\x43loudTasksProtoP\x01Z?google.golang.org/genproto/googleapis/cloud/tasks/v2beta3;tasks\xa2\x02\x05TASKSb\x06proto3' + 
'\n1google/cloud/tasks_v2beta3/proto/cloudtasks.proto\x12\x1agoogle.cloud.tasks.v2beta3\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a,google/cloud/tasks_v2beta3/proto/queue.proto\x1a+google/cloud/tasks_v2beta3/proto/task.proto\x1a\x1egoogle/iam/v1/iam_policy.proto\x1a\x1agoogle/iam/v1/policy.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto"\x83\x01\n\x11ListQueuesRequest\x12\x37\n\x06parent\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\x12\x1f\x63loudtasks.googleapis.com/Queue\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t"`\n\x12ListQueuesResponse\x12\x31\n\x06queues\x18\x01 \x03(\x0b\x32!.google.cloud.tasks.v2beta3.Queue\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"H\n\x0fGetQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"\x84\x01\n\x12\x43reateQueueRequest\x12\x37\n\x06parent\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\x12\x1f\x63loudtasks.googleapis.com/Queue\x12\x35\n\x05queue\x18\x02 \x01(\x0b\x32!.google.cloud.tasks.v2beta3.QueueB\x03\xe0\x41\x02"|\n\x12UpdateQueueRequest\x12\x35\n\x05queue\x18\x01 \x01(\x0b\x32!.google.cloud.tasks.v2beta3.QueueB\x03\xe0\x41\x02\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"K\n\x12\x44\x65leteQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"J\n\x11PurgeQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"J\n\x11PauseQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"K\n\x12ResumeQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"\xaf\x01\n\x10ListTasksRequest\x12\x36\n\x06parent\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 
\x12\x1e\x63loudtasks.googleapis.com/Task\x12<\n\rresponse_view\x18\x02 \x01(\x0e\x32%.google.cloud.tasks.v2beta3.Task.View\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t"]\n\x11ListTasksResponse\x12/\n\x05tasks\x18\x01 \x03(\x0b\x32 .google.cloud.tasks.v2beta3.Task\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\x84\x01\n\x0eGetTaskRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x63loudtasks.googleapis.com/Task\x12<\n\rresponse_view\x18\x02 \x01(\x0e\x32%.google.cloud.tasks.v2beta3.Task.View"\xbe\x01\n\x11\x43reateTaskRequest\x12\x36\n\x06parent\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \x12\x1e\x63loudtasks.googleapis.com/Task\x12\x33\n\x04task\x18\x02 \x01(\x0b\x32 .google.cloud.tasks.v2beta3.TaskB\x03\xe0\x41\x02\x12<\n\rresponse_view\x18\x03 \x01(\x0e\x32%.google.cloud.tasks.v2beta3.Task.View"I\n\x11\x44\x65leteTaskRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x63loudtasks.googleapis.com/Task"\x84\x01\n\x0eRunTaskRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x63loudtasks.googleapis.com/Task\x12<\n\rresponse_view\x18\x02 
\x01(\x0e\x32%.google.cloud.tasks.v2beta3.Task.View2\xa5\x16\n\nCloudTasks\x12\xad\x01\n\nListQueues\x12-.google.cloud.tasks.v2beta3.ListQueuesRequest\x1a..google.cloud.tasks.v2beta3.ListQueuesResponse"@\x82\xd3\xe4\x93\x02\x31\x12//v2beta3/{parent=projects/*/locations/*}/queues\xda\x41\x06parent\x12\x9a\x01\n\x08GetQueue\x12+.google.cloud.tasks.v2beta3.GetQueueRequest\x1a!.google.cloud.tasks.v2beta3.Queue">\x82\xd3\xe4\x93\x02\x31\x12//v2beta3/{name=projects/*/locations/*/queues/*}\xda\x41\x04name\x12\xaf\x01\n\x0b\x43reateQueue\x12..google.cloud.tasks.v2beta3.CreateQueueRequest\x1a!.google.cloud.tasks.v2beta3.Queue"M\x82\xd3\xe4\x93\x02\x38"//v2beta3/{parent=projects/*/locations/*}/queues:\x05queue\xda\x41\x0cparent,queue\x12\xba\x01\n\x0bUpdateQueue\x12..google.cloud.tasks.v2beta3.UpdateQueueRequest\x1a!.google.cloud.tasks.v2beta3.Queue"X\x82\xd3\xe4\x93\x02>25/v2beta3/{queue.name=projects/*/locations/*/queues/*}:\x05queue\xda\x41\x11queue,update_mask\x12\x95\x01\n\x0b\x44\x65leteQueue\x12..google.cloud.tasks.v2beta3.DeleteQueueRequest\x1a\x16.google.protobuf.Empty">\x82\xd3\xe4\x93\x02\x31*//v2beta3/{name=projects/*/locations/*/queues/*}\xda\x41\x04name\x12\xa7\x01\n\nPurgeQueue\x12-.google.cloud.tasks.v2beta3.PurgeQueueRequest\x1a!.google.cloud.tasks.v2beta3.Queue"G\x82\xd3\xe4\x93\x02:"5/v2beta3/{name=projects/*/locations/*/queues/*}:purge:\x01*\xda\x41\x04name\x12\xa7\x01\n\nPauseQueue\x12-.google.cloud.tasks.v2beta3.PauseQueueRequest\x1a!.google.cloud.tasks.v2beta3.Queue"G\x82\xd3\xe4\x93\x02:"5/v2beta3/{name=projects/*/locations/*/queues/*}:pause:\x01*\xda\x41\x04name\x12\xaa\x01\n\x0bResumeQueue\x12..google.cloud.tasks.v2beta3.ResumeQueueRequest\x1a!.google.cloud.tasks.v2beta3.Queue"H\x82\xd3\xe4\x93\x02;"6/v2beta3/{name=projects/*/locations/*/queues/*}:resume:\x01*\xda\x41\x04name\x12\xa1\x01\n\x0cGetIamPolicy\x12".google.iam.v1.GetIamPolicyRequest\x1a\x15.google.iam.v1.Policy"V\x82\xd3\xe4\x93\x02\x45"@/v2beta3/{resource=projects/*/locations/*/queues/*}:
getIamPolicy:\x01*\xda\x41\x08resource\x12\xa8\x01\n\x0cSetIamPolicy\x12".google.iam.v1.SetIamPolicyRequest\x1a\x15.google.iam.v1.Policy"]\x82\xd3\xe4\x93\x02\x45"@/v2beta3/{resource=projects/*/locations/*/queues/*}:setIamPolicy:\x01*\xda\x41\x0fresource,policy\x12\xd3\x01\n\x12TestIamPermissions\x12(.google.iam.v1.TestIamPermissionsRequest\x1a).google.iam.v1.TestIamPermissionsResponse"h\x82\xd3\xe4\x93\x02K"F/v2beta3/{resource=projects/*/locations/*/queues/*}:testIamPermissions:\x01*\xda\x41\x14resource,permissions\x12\xb2\x01\n\tListTasks\x12,.google.cloud.tasks.v2beta3.ListTasksRequest\x1a-.google.cloud.tasks.v2beta3.ListTasksResponse"H\x82\xd3\xe4\x93\x02\x39\x12\x37/v2beta3/{parent=projects/*/locations/*/queues/*}/tasks\xda\x41\x06parent\x12\x9f\x01\n\x07GetTask\x12*.google.cloud.tasks.v2beta3.GetTaskRequest\x1a .google.cloud.tasks.v2beta3.Task"F\x82\xd3\xe4\x93\x02\x39\x12\x37/v2beta3/{name=projects/*/locations/*/queues/*/tasks/*}\xda\x41\x04name\x12\xaf\x01\n\nCreateTask\x12-.google.cloud.tasks.v2beta3.CreateTaskRequest\x1a .google.cloud.tasks.v2beta3.Task"P\x82\xd3\xe4\x93\x02<"7/v2beta3/{parent=projects/*/locations/*/queues/*}/tasks:\x01*\xda\x41\x0bparent,task\x12\x9b\x01\n\nDeleteTask\x12-.google.cloud.tasks.v2beta3.DeleteTaskRequest\x1a\x16.google.protobuf.Empty"F\x82\xd3\xe4\x93\x02\x39*7/v2beta3/{name=projects/*/locations/*/queues/*/tasks/*}\xda\x41\x04name\x12\xa6\x01\n\x07RunTask\x12*.google.cloud.tasks.v2beta3.RunTaskRequest\x1a .google.cloud.tasks.v2beta3.Task"M\x82\xd3\xe4\x93\x02@";/v2beta3/{name=projects/*/locations/*/queues/*/tasks/*}:run:\x01*\xda\x41\x04name\x1aM\xca\x41\x19\x63loudtasks.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB|\n\x1e\x63om.google.cloud.tasks.v2beta3B\x0f\x43loudTasksProtoP\x01Z?google.golang.org/genproto/googleapis/cloud/tasks/v2beta3;tasks\xa2\x02\x05TASKSb\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, @@ -79,7 +79,7 @@ is_extension=False, extension_scope=None, 
serialized_options=_b( - "\340A\002\372A#\022!locations.googleapis.com/Location" + "\340A\002\372A!\022\037cloudtasks.googleapis.com/Queue" ), file=DESCRIPTOR, ), @@ -147,7 +147,7 @@ extension_ranges=[], oneofs=[], serialized_start=411, - serialized_end=544, + serialized_end=542, ) @@ -203,8 +203,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=546, - serialized_end=642, + serialized_start=544, + serialized_end=640, ) @@ -244,8 +244,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=644, - serialized_end=716, + serialized_start=642, + serialized_end=714, ) @@ -272,7 +272,7 @@ is_extension=False, extension_scope=None, serialized_options=_b( - "\340A\002\372A#\022!locations.googleapis.com/Location" + "\340A\002\372A!\022\037cloudtasks.googleapis.com/Queue" ), file=DESCRIPTOR, ), @@ -303,8 +303,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=719, - serialized_end=853, + serialized_start=717, + serialized_end=849, ) @@ -360,8 +360,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=855, - serialized_end=979, + serialized_start=851, + serialized_end=975, ) @@ -401,8 +401,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=981, - serialized_end=1056, + serialized_start=977, + serialized_end=1052, ) @@ -442,8 +442,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1058, - serialized_end=1132, + serialized_start=1054, + serialized_end=1128, ) @@ -483,8 +483,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1134, - serialized_end=1208, + serialized_start=1130, + serialized_end=1204, ) @@ -524,8 +524,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1210, - serialized_end=1285, + serialized_start=1206, + serialized_end=1281, ) @@ -552,7 +552,7 @@ is_extension=False, extension_scope=None, serialized_options=_b( - "\340A\002\372A!\022\037cloudtasks.googleapis.com/Queue" + "\340A\002\372A 
\022\036cloudtasks.googleapis.com/Task" ), file=DESCRIPTOR, ), @@ -619,8 +619,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1288, - serialized_end=1464, + serialized_start=1284, + serialized_end=1459, ) @@ -676,8 +676,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1466, - serialized_end=1559, + serialized_start=1461, + serialized_end=1554, ) @@ -735,8 +735,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1562, - serialized_end=1694, + serialized_start=1557, + serialized_end=1689, ) @@ -763,7 +763,7 @@ is_extension=False, extension_scope=None, serialized_options=_b( - "\340A\002\372A!\022\037cloudtasks.googleapis.com/Queue" + "\340A\002\372A \022\036cloudtasks.googleapis.com/Task" ), file=DESCRIPTOR, ), @@ -812,8 +812,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1697, - serialized_end=1888, + serialized_start=1692, + serialized_end=1882, ) @@ -853,8 +853,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1890, - serialized_end=1963, + serialized_start=1884, + serialized_end=1957, ) @@ -912,8 +912,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1966, - serialized_end=2098, + serialized_start=1960, + serialized_end=2092, ) _LISTQUEUESRESPONSE.fields_by_name[ @@ -1442,8 +1442,8 @@ serialized_options=_b( "\312A\031cloudtasks.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" ), - serialized_start=2101, - serialized_end=4954, + serialized_start=2095, + serialized_end=4948, methods=[ _descriptor.MethodDescriptor( name="ListQueues", diff --git a/tasks/setup.py b/tasks/setup.py index 3b7afc41b737..900009501442 100644 --- a/tasks/setup.py +++ b/tasks/setup.py @@ -21,7 +21,7 @@ name = "google-cloud-tasks" description = "Cloud Tasks API API client library" -version = "1.2.1" +version = "1.3.0" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "google-api-core[grpc] >= 1.14.0, < 
2.0.0dev", diff --git a/tasks/synth.metadata b/tasks/synth.metadata index 86ec1727e79d..ef9b977ad277 100644 --- a/tasks/synth.metadata +++ b/tasks/synth.metadata @@ -1,26 +1,26 @@ { - "updateTime": "2019-08-21T12:39:33.815307Z", + "updateTime": "2019-11-02T12:36:27.240345Z", "sources": [ { "generator": { "name": "artman", - "version": "0.34.0", - "dockerImage": "googleapis/artman@sha256:38a27ba6245f96c3e86df7acb2ebcc33b4f186d9e475efe2d64303aec3d4e0ea" + "version": "0.41.0", + "dockerImage": "googleapis/artman@sha256:75b38a3b073a7b243545f2332463096624c802bb1e56b8cb6f22ba1ecd325fa9" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "11592a15391951348a64f5c303399733b1c5b3b2", - "internalRef": "264425502" + "sha": "aac770126e2def40dcc387f50e8007b21c869e58", + "internalRef": "278016738" } }, { "template": { "name": "python_library", "origin": "synthtool.gcp", - "version": "2019.5.2" + "version": "2019.10.17" } } ], diff --git a/tasks/synth.py b/tasks/synth.py index a5eeca136543..4307a63bbfa1 100644 --- a/tasks/synth.py +++ b/tasks/synth.py @@ -82,6 +82,35 @@ "\g<1>... }\n", ) +# Fix enum docstring references +s.replace( + "google/cloud/**/cloud_tasks_client.py", + "types\.View", + "enums.Task.View") + +# Change wording of optional params to disambiguate +# client library request methods from Cloud Task requests +s.replace("google/cloud/**/*.py", +""" retry \(Optional\[google\.api_core\.retry\.Retry\]\): A retry object used + to retry requests\. If ``None`` is specified, requests will + be retried using a default configuration\. + timeout \(Optional\[float\]\): The amount of time, in seconds, to wait + for the request to complete\. Note that if ``retry`` is + specified, the timeout applies to each individual attempt\. + metadata \(Optional\[Sequence\[Tuple\[str, str\]\]\]\): Additional metadata + that is provided to the method\. 
+ +""", +""" retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the client library request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the client library method. + +""") # ---------------------------------------------------------------------------- # Add templated files diff --git a/test_utils/test_utils/vpcsc_config.py b/test_utils/test_utils/vpcsc_config.py new file mode 100644 index 000000000000..36b15d6be991 --- /dev/null +++ b/test_utils/test_utils/vpcsc_config.py @@ -0,0 +1,118 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os + +import pytest + + +INSIDE_VPCSC_ENVVAR = "GOOGLE_CLOUD_TESTS_IN_VPCSC" +PROJECT_INSIDE_ENVVAR = "PROJECT_ID" +PROJECT_OUTSIDE_ENVVAR = "GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT" +BUCKET_OUTSIDE_ENVVAR = "GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_BUCKET" + + +class VPCSCTestConfig(object): + """System test utility for VPCSC detection. 
+ + See: https://cloud.google.com/vpc-service-controls/docs/ + """ + + @property + def inside_vpcsc(self): + """Test whether the test environment is configured to run inside VPCSC. + + Returns: + bool: + true if the environment is configured to run inside VPCSC, + else false. + """ + return INSIDE_VPCSC_ENVVAR in os.environ + + @property + def project_inside(self): + """Project ID for testing outside access. + + Returns: + str: project ID used for testing outside access; None if undefined. + """ + return os.environ.get(PROJECT_INSIDE_ENVVAR, None) + + @property + def project_outside(self): + """Project ID for testing inside access. + + Returns: + str: project ID used for testing inside access; None if undefined. + """ + return os.environ.get(PROJECT_OUTSIDE_ENVVAR, None) + + @property + def bucket_outside(self): + """GCS bucket for testing inside access. + + Returns: + str: bucket ID used for testing inside access; None if undefined. + """ + return os.environ.get(BUCKET_OUTSIDE_ENVVAR, None) + + def skip_if_inside_vpcsc(self, testcase): + """Test decorator: skip if running inside VPCSC.""" + reason = ( + "Running inside VPCSC. " + "Unset the {} environment variable to enable this test." + ).format(INSIDE_VPCSC_ENVVAR) + skip = pytest.mark.skipif(self.inside_vpcsc, reason=reason) + return skip(testcase) + + def skip_unless_inside_vpcsc(self, testcase): + """Test decorator: skip if running outside VPCSC.""" + reason = ( + "Running outside VPCSC. " + "Set the {} environment variable to enable this test." + ).format(INSIDE_VPCSC_ENVVAR) + skip = pytest.mark.skipif(not self.inside_vpcsc, reason=reason) + return skip(testcase) + + def skip_unless_inside_project(self, testcase): + """Test decorator: skip if inside project env var not set.""" + reason = ( + "Project ID for running inside VPCSC not set. " + "Set the {} environment variable to enable this test." 
+ ).format(PROJECT_INSIDE_ENVVAR) + skip = pytest.mark.skipif(self.project_inside is None, reason=reason) + return skip(testcase) + + def skip_unless_outside_project(self, testcase): + """Test decorator: skip if outside project env var not set.""" + reason = ( + "Project ID for running outside VPCSC not set. " + "Set the {} environment variable to enable this test." + ).format(PROJECT_OUTSIDE_ENVVAR) + skip = pytest.mark.skipif(self.project_outside is None, reason=reason) + return skip(testcase) + + def skip_unless_outside_bucket(self, testcase): + """Test decorator: skip if outside bucket env var not set.""" + reason = ( + "Bucket ID for running outside VPCSC not set. " + "Set the {} environment variable to enable this test." + ).format(BUCKET_OUTSIDE_ENVVAR) + skip = pytest.mark.skipif(self.bucket_outside is None, reason=reason) + return skip(testcase) + + +vpcsc_config = VPCSCTestConfig() diff --git a/texttospeech/docs/conf.py b/texttospeech/docs/conf.py index 337c7da9b6fd..31c845f24010 100644 --- a/texttospeech/docs/conf.py +++ b/texttospeech/docs/conf.py @@ -338,7 +338,7 @@ "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://2.python-requests.org/en/master/", None), + "requests": ("https://requests.kennethreitz.org/en/stable/", None), "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } diff --git a/texttospeech/google/cloud/texttospeech_v1/gapic/enums.py b/texttospeech/google/cloud/texttospeech_v1/gapic/enums.py index 97736e3a0080..3ac941cd65d6 100644 --- a/texttospeech/google/cloud/texttospeech_v1/gapic/enums.py +++ b/texttospeech/google/cloud/texttospeech_v1/gapic/enums.py @@ -28,7 +28,7 @@ class AudioEncoding(enum.IntEnum): AUDIO_ENCODING_UNSPECIFIED (int): Not specified. 
Will return result ``google.rpc.Code.INVALID_ARGUMENT``. LINEAR16 (int): Uncompressed 16-bit signed little-endian samples (Linear PCM). Audio content returned as LINEAR16 also contains a WAV header. - MP3 (int): MP3 audio. + MP3 (int): MP3 audio at 32kbps. OGG_OPUS (int): Opus encoded audio wrapped in an ogg container. The result will be a file which can be played natively on Android, and in browsers (at least Chrome and Firefox). The quality of the encoding is considerably higher diff --git a/texttospeech/google/cloud/texttospeech_v1/gapic/text_to_speech_client.py b/texttospeech/google/cloud/texttospeech_v1/gapic/text_to_speech_client.py index 658ac0ec72d1..868398f228ae 100644 --- a/texttospeech/google/cloud/texttospeech_v1/gapic/text_to_speech_client.py +++ b/texttospeech/google/cloud/texttospeech_v1/gapic/text_to_speech_client.py @@ -200,7 +200,7 @@ def list_voices( >>> response = client.list_voices() Args: - language_code (str): Optional (but recommended) + language_code (str): Optional. Recommended. `BCP-47 `__ language tag. If specified, the ListVoices call will only return voices that can be used to synthesize this language\_code. E.g. 
when specifying "en-NZ", diff --git a/texttospeech/google/cloud/texttospeech_v1/gapic/text_to_speech_client_config.py b/texttospeech/google/cloud/texttospeech_v1/gapic/text_to_speech_client_config.py index 91bb80823f63..5c86cb227911 100644 --- a/texttospeech/google/cloud/texttospeech_v1/gapic/text_to_speech_client_config.py +++ b/texttospeech/google/cloud/texttospeech_v1/gapic/text_to_speech_client_config.py @@ -18,13 +18,13 @@ }, "methods": { "ListVoices": { - "timeout_millis": 30000, + "timeout_millis": 60000, "retry_codes_name": "idempotent", "retry_params_name": "default", }, "SynthesizeSpeech": { "timeout_millis": 60000, - "retry_codes_name": "idempotent", + "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, }, diff --git a/texttospeech/google/cloud/texttospeech_v1/proto/cloud_tts.proto b/texttospeech/google/cloud/texttospeech_v1/proto/cloud_tts.proto index dd9fa6ce221a..6263da4ab085 100644 --- a/texttospeech/google/cloud/texttospeech_v1/proto/cloud_tts.proto +++ b/texttospeech/google/cloud/texttospeech_v1/proto/cloud_tts.proto @@ -1,4 +1,4 @@ -// Copyright 2018 Google LLC. +// Copyright 2019 Google LLC. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -18,6 +18,8 @@ syntax = "proto3"; package google.cloud.texttospeech.v1; import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; option cc_enable_arenas = true; option csharp_namespace = "Google.Cloud.TextToSpeech.V1"; @@ -29,27 +31,31 @@ option php_namespace = "Google\\Cloud\\TextToSpeech\\V1"; // Service that implements Google Cloud Text-to-Speech API. service TextToSpeech { + option (google.api.default_host) = "texttospeech.googleapis.com"; + option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; + // Returns a list of Voice supported for synthesis. 
rpc ListVoices(ListVoicesRequest) returns (ListVoicesResponse) { option (google.api.http) = { get: "/v1/voices" }; + option (google.api.method_signature) = "language_code"; } // Synthesizes speech synchronously: receive results after all text input // has been processed. - rpc SynthesizeSpeech(SynthesizeSpeechRequest) - returns (SynthesizeSpeechResponse) { + rpc SynthesizeSpeech(SynthesizeSpeechRequest) returns (SynthesizeSpeechResponse) { option (google.api.http) = { post: "/v1/text:synthesize" body: "*" }; + option (google.api.method_signature) = "input,voice,audio_config"; } } // The top-level message sent by the client for the `ListVoices` method. message ListVoicesRequest { - // Optional (but recommended) + // Optional. Recommended. // [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tag. If // specified, the ListVoices call will only return voices that can be used to // synthesize this language_code. E.g. when specifying "en-NZ", you will get @@ -57,7 +63,47 @@ message ListVoicesRequest { // "no-*" (Norwegian) and "nb-*" (Norwegian Bokmal) voices; specifying "zh" // will also get supported "cmn-*" voices; specifying "zh-hk" will also get // supported "yue-*" voices. - string language_code = 1; + string language_code = 1 [(google.api.field_behavior) = OPTIONAL]; +} + +// Gender of the voice as described in +// [SSML voice element](https://www.w3.org/TR/speech-synthesis11/#edef_voice). +enum SsmlVoiceGender { + // An unspecified gender. + // In VoiceSelectionParams, this means that the client doesn't care which + // gender the selected voice will have. In the Voice field of + // ListVoicesResponse, this may mean that the voice doesn't fit any of the + // other categories in this enum, or that the gender of the voice isn't known. + SSML_VOICE_GENDER_UNSPECIFIED = 0; + + // A male voice. + MALE = 1; + + // A female voice. + FEMALE = 2; + + // A gender-neutral voice. + NEUTRAL = 3; +} + +// Configuration to set up audio encoder. 
The encoding determines the output +// audio format that we'd like. +enum AudioEncoding { + // Not specified. Will return result [google.rpc.Code.INVALID_ARGUMENT][]. + AUDIO_ENCODING_UNSPECIFIED = 0; + + // Uncompressed 16-bit signed little-endian samples (Linear PCM). + // Audio content returned as LINEAR16 also contains a WAV header. + LINEAR16 = 1; + + // MP3 audio at 32kbps. + MP3 = 2; + + // Opus encoded audio wrapped in an ogg container. The result will be a + // file which can be played natively on Android, and in browsers (at least + // Chrome and Firefox). The quality of the encoding is considerably higher + // than MP3 while using approximately the same bitrate. + OGG_OPUS = 3; } // The message returned to the client by the `ListVoices` method. @@ -86,13 +132,13 @@ message Voice { // The top-level message sent by the client for the `SynthesizeSpeech` method. message SynthesizeSpeechRequest { // Required. The Synthesizer requires either plain text or SSML as input. - SynthesisInput input = 1; + SynthesisInput input = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The desired voice of the synthesized audio. - VoiceSelectionParams voice = 2; + VoiceSelectionParams voice = 2 [(google.api.field_behavior) = REQUIRED]; // Required. The configuration of the synthesized audio. - AudioConfig audio_config = 3; + AudioConfig audio_config = 3 [(google.api.field_behavior) = REQUIRED]; } // Contains text input to be synthesized. Either `text` or `ssml` must be @@ -115,9 +161,9 @@ message SynthesisInput { // Description of which voice to use for a synthesis request. message VoiceSelectionParams { - // The language (and optionally also the region) of the voice expressed as a + // Required. The language (and potentially also the region) of the voice expressed as a // [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tag, e.g. - // "en-US". Required. This should not include a script tag (e.g. use + // "en-US". 
This should not include a script tag (e.g. use // "cmn-cn" rather than "cmn-Hant-cn"), because the script will be inferred // from the input provided in the SynthesisInput. The TTS service // will use this parameter to help choose an appropriate voice. Note that @@ -126,13 +172,13 @@ message VoiceSelectionParams { // (e.g. using en-US rather than en-CA if there isn't a Canadian voice // available), or even a different language, e.g. using "nb" (Norwegian // Bokmal) instead of "no" (Norwegian)". - string language_code = 1; + string language_code = 1 [(google.api.field_behavior) = REQUIRED]; - // The name of the voice. Optional; if not set, the service will choose a + // The name of the voice. If not set, the service will choose a // voice based on the other parameters such as language_code and gender. string name = 2; - // The preferred gender of the voice. Optional; if not set, the service will + // The preferred gender of the voice. If not set, the service will // choose a voice based on the other parameters such as language_code and // name. Note that this is only a preference, not requirement; if a // voice of the appropriate gender is not available, the synthesizer should @@ -142,94 +188,66 @@ message VoiceSelectionParams { // Description of audio data to be synthesized. message AudioConfig { - // Required. The format of the requested audio byte stream. - AudioEncoding audio_encoding = 1; - - // Optional speaking rate/speed, in the range [0.25, 4.0]. 1.0 is the normal - // native speed supported by the specific voice. 2.0 is twice as fast, and - // 0.5 is half as fast. If unset(0.0), defaults to the native 1.0 speed. Any - // other values < 0.25 or > 4.0 will return an error. - double speaking_rate = 2; - - // Optional speaking pitch, in the range [-20.0, 20.0]. 20 means increase 20 - // semitones from the original pitch. -20 means decrease 20 semitones from the - // original pitch. 
- double pitch = 3; - - // Optional volume gain (in dB) of the normal native volume supported by the - // specific voice, in the range [-96.0, 16.0]. If unset, or set to a value of - // 0.0 (dB), will play at normal native signal amplitude. A value of -6.0 (dB) - // will play at approximately half the amplitude of the normal native signal - // amplitude. A value of +6.0 (dB) will play at approximately twice the - // amplitude of the normal native signal amplitude. Strongly recommend not to - // exceed +10 (dB) as there's usually no effective increase in loudness for - // any value greater than that. - double volume_gain_db = 4; - - // The synthesis sample rate (in hertz) for this audio. Optional. If this is - // different from the voice's natural sample rate, then the synthesizer will - // honor this request by converting to the desired sample rate (which might - // result in worse audio quality), unless the specified sample rate is not - // supported for the encoding chosen, in which case it will fail the request - // and return [google.rpc.Code.INVALID_ARGUMENT][]. - int32 sample_rate_hertz = 5; - - // An identifier which selects 'audio effects' profiles that are applied on - // (post synthesized) text to speech. - // Effects are applied on top of each other in the order they are given. - // See - // - // [audio-profiles](https: - // //cloud.google.com/text-to-speech/docs/audio-profiles) - // for current supported profile ids. - repeated string effects_profile_id = 6; + // Required. The format of the audio byte stream. + AudioEncoding audio_encoding = 1 [(google.api.field_behavior) = REQUIRED]; + + // Optional. Input only. Speaking rate/speed, in the range [0.25, 4.0]. 1.0 is + // the normal native speed supported by the specific voice. 2.0 is twice as + // fast, and 0.5 is half as fast. If unset(0.0), defaults to the native 1.0 + // speed. Any other values < 0.25 or > 4.0 will return an error. 
+ double speaking_rate = 2 [ + (google.api.field_behavior) = INPUT_ONLY, + (google.api.field_behavior) = OPTIONAL + ]; + + // Optional. Input only. Speaking pitch, in the range [-20.0, 20.0]. 20 means + // increase 20 semitones from the original pitch. -20 means decrease 20 + // semitones from the original pitch. + double pitch = 3 [ + (google.api.field_behavior) = INPUT_ONLY, + (google.api.field_behavior) = OPTIONAL + ]; + + // Optional. Input only. Volume gain (in dB) of the normal native volume + // supported by the specific voice, in the range [-96.0, 16.0]. If unset, or + // set to a value of 0.0 (dB), will play at normal native signal amplitude. A + // value of -6.0 (dB) will play at approximately half the amplitude of the + // normal native signal amplitude. A value of +6.0 (dB) will play at + // approximately twice the amplitude of the normal native signal amplitude. + // Strongly recommend not to exceed +10 (dB) as there's usually no effective + // increase in loudness for any value greater than that. + double volume_gain_db = 4 [ + (google.api.field_behavior) = INPUT_ONLY, + (google.api.field_behavior) = OPTIONAL + ]; + + // Optional. The synthesis sample rate (in hertz) for this audio. When this is + // specified in SynthesizeSpeechRequest, if this is different from the voice's + // natural sample rate, then the synthesizer will honor this request by + // converting to the desired sample rate (which might result in worse audio + // quality), unless the specified sample rate is not supported for the + // encoding chosen, in which case it will fail the request and return + // [google.rpc.Code.INVALID_ARGUMENT][]. + int32 sample_rate_hertz = 5 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Input only. An identifier which selects 'audio effects' profiles + // that are applied on (post synthesized) text to speech. Effects are applied + // on top of each other in the order they are given. 
See + // [audio + // profiles](https://cloud.google.com/text-to-speech/docs/audio-profiles) for + // current supported profile ids. + repeated string effects_profile_id = 6 [ + (google.api.field_behavior) = INPUT_ONLY, + (google.api.field_behavior) = OPTIONAL + ]; } // The message returned to the client by the `SynthesizeSpeech` method. message SynthesizeSpeechResponse { // The audio data bytes encoded as specified in the request, including the - // header (For LINEAR16 audio, we include the WAV header). Note: as + // header for encodings that are wrapped in containers (e.g. MP3, OGG_OPUS). + // For LINEAR16 audio, we include the WAV header. Note: as // with all bytes fields, protobuffers use a pure binary representation, // whereas JSON representations use base64. bytes audio_content = 1; } - -// Gender of the voice as described in -// [SSML voice element](https://www.w3.org/TR/speech-synthesis11/#edef_voice). -enum SsmlVoiceGender { - // An unspecified gender. - // In VoiceSelectionParams, this means that the client doesn't care which - // gender the selected voice will have. In the Voice field of - // ListVoicesResponse, this may mean that the voice doesn't fit any of the - // other categories in this enum, or that the gender of the voice isn't known. - SSML_VOICE_GENDER_UNSPECIFIED = 0; - - // A male voice. - MALE = 1; - - // A female voice. - FEMALE = 2; - - // A gender-neutral voice. - NEUTRAL = 3; -} - -// Configuration to set up audio encoder. The encoding determines the output -// audio format that we'd like. -enum AudioEncoding { - // Not specified. Will return result [google.rpc.Code.INVALID_ARGUMENT][]. - AUDIO_ENCODING_UNSPECIFIED = 0; - - // Uncompressed 16-bit signed little-endian samples (Linear PCM). - // Audio content returned as LINEAR16 also contains a WAV header. - LINEAR16 = 1; - - // MP3 audio. - MP3 = 2; - - // Opus encoded audio wrapped in an ogg container. 
The result will be a - // file which can be played natively on Android, and in browsers (at least - // Chrome and Firefox). The quality of the encoding is considerably higher - // than MP3 while using approximately the same bitrate. - OGG_OPUS = 3; -} diff --git a/texttospeech/google/cloud/texttospeech_v1/proto/cloud_tts_pb2.py b/texttospeech/google/cloud/texttospeech_v1/proto/cloud_tts_pb2.py index 1c8e163d5dc0..24af440d65b6 100644 --- a/texttospeech/google/cloud/texttospeech_v1/proto/cloud_tts_pb2.py +++ b/texttospeech/google/cloud/texttospeech_v1/proto/cloud_tts_pb2.py @@ -17,6 +17,8 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 DESCRIPTOR = _descriptor.FileDescriptor( @@ -27,9 +29,13 @@ "\n com.google.cloud.texttospeech.v1B\021TextToSpeechProtoP\001ZHgoogle.golang.org/genproto/googleapis/cloud/texttospeech/v1;texttospeech\370\001\001\252\002\034Google.Cloud.TextToSpeech.V1\312\002\034Google\\Cloud\\TextToSpeech\\V1" ), serialized_pb=_b( - '\n2google/cloud/texttospeech_v1/proto/cloud_tts.proto\x12\x1cgoogle.cloud.texttospeech.v1\x1a\x1cgoogle/api/annotations.proto"*\n\x11ListVoicesRequest\x12\x15\n\rlanguage_code\x18\x01 \x01(\t"I\n\x12ListVoicesResponse\x12\x33\n\x06voices\x18\x01 \x03(\x0b\x32#.google.cloud.texttospeech.v1.Voice"\x94\x01\n\x05Voice\x12\x16\n\x0elanguage_codes\x18\x01 \x03(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x42\n\x0bssml_gender\x18\x03 \x01(\x0e\x32-.google.cloud.texttospeech.v1.SsmlVoiceGender\x12!\n\x19natural_sample_rate_hertz\x18\x04 \x01(\x05"\xda\x01\n\x17SynthesizeSpeechRequest\x12;\n\x05input\x18\x01 \x01(\x0b\x32,.google.cloud.texttospeech.v1.SynthesisInput\x12\x41\n\x05voice\x18\x02 \x01(\x0b\x32\x32.google.cloud.texttospeech.v1.VoiceSelectionParams\x12?\n\x0c\x61udio_config\x18\x03 
\x01(\x0b\x32).google.cloud.texttospeech.v1.AudioConfig"@\n\x0eSynthesisInput\x12\x0e\n\x04text\x18\x01 \x01(\tH\x00\x12\x0e\n\x04ssml\x18\x02 \x01(\tH\x00\x42\x0e\n\x0cinput_source"\x7f\n\x14VoiceSelectionParams\x12\x15\n\rlanguage_code\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x42\n\x0bssml_gender\x18\x03 \x01(\x0e\x32-.google.cloud.texttospeech.v1.SsmlVoiceGender"\xc7\x01\n\x0b\x41udioConfig\x12\x43\n\x0e\x61udio_encoding\x18\x01 \x01(\x0e\x32+.google.cloud.texttospeech.v1.AudioEncoding\x12\x15\n\rspeaking_rate\x18\x02 \x01(\x01\x12\r\n\x05pitch\x18\x03 \x01(\x01\x12\x16\n\x0evolume_gain_db\x18\x04 \x01(\x01\x12\x19\n\x11sample_rate_hertz\x18\x05 \x01(\x05\x12\x1a\n\x12\x65\x66\x66\x65\x63ts_profile_id\x18\x06 \x03(\t"1\n\x18SynthesizeSpeechResponse\x12\x15\n\raudio_content\x18\x01 \x01(\x0c*W\n\x0fSsmlVoiceGender\x12!\n\x1dSSML_VOICE_GENDER_UNSPECIFIED\x10\x00\x12\x08\n\x04MALE\x10\x01\x12\n\n\x06\x46\x45MALE\x10\x02\x12\x0b\n\x07NEUTRAL\x10\x03*T\n\rAudioEncoding\x12\x1e\n\x1a\x41UDIO_ENCODING_UNSPECIFIED\x10\x00\x12\x0c\n\x08LINEAR16\x10\x01\x12\x07\n\x03MP3\x10\x02\x12\x0c\n\x08OGG_OPUS\x10\x03\x32\xb8\x02\n\x0cTextToSpeech\x12\x83\x01\n\nListVoices\x12/.google.cloud.texttospeech.v1.ListVoicesRequest\x1a\x30.google.cloud.texttospeech.v1.ListVoicesResponse"\x12\x82\xd3\xe4\x93\x02\x0c\x12\n/v1/voices\x12\xa1\x01\n\x10SynthesizeSpeech\x12\x35.google.cloud.texttospeech.v1.SynthesizeSpeechRequest\x1a\x36.google.cloud.texttospeech.v1.SynthesizeSpeechResponse"\x1e\x82\xd3\xe4\x93\x02\x18"\x13/v1/text:synthesize:\x01*B\xc2\x01\n com.google.cloud.texttospeech.v1B\x11TextToSpeechProtoP\x01ZHgoogle.golang.org/genproto/googleapis/cloud/texttospeech/v1;texttospeech\xf8\x01\x01\xaa\x02\x1cGoogle.Cloud.TextToSpeech.V1\xca\x02\x1cGoogle\\Cloud\\TextToSpeech\\V1b\x06proto3' + 
'\n2google/cloud/texttospeech_v1/proto/cloud_tts.proto\x12\x1cgoogle.cloud.texttospeech.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto"/\n\x11ListVoicesRequest\x12\x1a\n\rlanguage_code\x18\x01 \x01(\tB\x03\xe0\x41\x01"I\n\x12ListVoicesResponse\x12\x33\n\x06voices\x18\x01 \x03(\x0b\x32#.google.cloud.texttospeech.v1.Voice"\x94\x01\n\x05Voice\x12\x16\n\x0elanguage_codes\x18\x01 \x03(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x42\n\x0bssml_gender\x18\x03 \x01(\x0e\x32-.google.cloud.texttospeech.v1.SsmlVoiceGender\x12!\n\x19natural_sample_rate_hertz\x18\x04 \x01(\x05"\xe9\x01\n\x17SynthesizeSpeechRequest\x12@\n\x05input\x18\x01 \x01(\x0b\x32,.google.cloud.texttospeech.v1.SynthesisInputB\x03\xe0\x41\x02\x12\x46\n\x05voice\x18\x02 \x01(\x0b\x32\x32.google.cloud.texttospeech.v1.VoiceSelectionParamsB\x03\xe0\x41\x02\x12\x44\n\x0c\x61udio_config\x18\x03 \x01(\x0b\x32).google.cloud.texttospeech.v1.AudioConfigB\x03\xe0\x41\x02"@\n\x0eSynthesisInput\x12\x0e\n\x04text\x18\x01 \x01(\tH\x00\x12\x0e\n\x04ssml\x18\x02 \x01(\tH\x00\x42\x0e\n\x0cinput_source"\x84\x01\n\x14VoiceSelectionParams\x12\x1a\n\rlanguage_code\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x42\n\x0bssml_gender\x18\x03 \x01(\x0e\x32-.google.cloud.texttospeech.v1.SsmlVoiceGender"\xf1\x01\n\x0b\x41udioConfig\x12H\n\x0e\x61udio_encoding\x18\x01 \x01(\x0e\x32+.google.cloud.texttospeech.v1.AudioEncodingB\x03\xe0\x41\x02\x12\x1d\n\rspeaking_rate\x18\x02 \x01(\x01\x42\x06\xe0\x41\x04\xe0\x41\x01\x12\x15\n\x05pitch\x18\x03 \x01(\x01\x42\x06\xe0\x41\x04\xe0\x41\x01\x12\x1e\n\x0evolume_gain_db\x18\x04 \x01(\x01\x42\x06\xe0\x41\x04\xe0\x41\x01\x12\x1e\n\x11sample_rate_hertz\x18\x05 \x01(\x05\x42\x03\xe0\x41\x01\x12"\n\x12\x65\x66\x66\x65\x63ts_profile_id\x18\x06 \x03(\tB\x06\xe0\x41\x04\xe0\x41\x01"1\n\x18SynthesizeSpeechResponse\x12\x15\n\raudio_content\x18\x01 
\x01(\x0c*W\n\x0fSsmlVoiceGender\x12!\n\x1dSSML_VOICE_GENDER_UNSPECIFIED\x10\x00\x12\x08\n\x04MALE\x10\x01\x12\n\n\x06\x46\x45MALE\x10\x02\x12\x0b\n\x07NEUTRAL\x10\x03*T\n\rAudioEncoding\x12\x1e\n\x1a\x41UDIO_ENCODING_UNSPECIFIED\x10\x00\x12\x0c\n\x08LINEAR16\x10\x01\x12\x07\n\x03MP3\x10\x02\x12\x0c\n\x08OGG_OPUS\x10\x03\x32\xb4\x03\n\x0cTextToSpeech\x12\x93\x01\n\nListVoices\x12/.google.cloud.texttospeech.v1.ListVoicesRequest\x1a\x30.google.cloud.texttospeech.v1.ListVoicesResponse""\x82\xd3\xe4\x93\x02\x0c\x12\n/v1/voices\xda\x41\rlanguage_code\x12\xbc\x01\n\x10SynthesizeSpeech\x12\x35.google.cloud.texttospeech.v1.SynthesizeSpeechRequest\x1a\x36.google.cloud.texttospeech.v1.SynthesizeSpeechResponse"9\x82\xd3\xe4\x93\x02\x18"\x13/v1/text:synthesize:\x01*\xda\x41\x18input,voice,audio_config\x1aO\xca\x41\x1btexttospeech.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB\xc2\x01\n com.google.cloud.texttospeech.v1B\x11TextToSpeechProtoP\x01ZHgoogle.golang.org/genproto/googleapis/cloud/texttospeech/v1;texttospeech\xf8\x01\x01\xaa\x02\x1cGoogle.Cloud.TextToSpeech.V1\xca\x02\x1cGoogle\\Cloud\\TextToSpeech\\V1b\x06proto3' ), - dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR], + dependencies=[ + google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, + ], ) _SSMLVOICEGENDER = _descriptor.EnumDescriptor( @@ -57,8 +63,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=1053, - serialized_end=1140, + serialized_start=1179, + serialized_end=1266, ) _sym_db.RegisterEnumDescriptor(_SSMLVOICEGENDER) @@ -88,8 +94,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=1142, - serialized_end=1226, + serialized_start=1268, + serialized_end=1352, ) _sym_db.RegisterEnumDescriptor(_AUDIOENCODING) @@ -126,7 +132,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + 
serialized_options=_b("\340A\001"), file=DESCRIPTOR, ) ], @@ -138,8 +144,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=114, - serialized_end=156, + serialized_start=172, + serialized_end=219, ) @@ -177,8 +183,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=158, - serialized_end=231, + serialized_start=221, + serialized_end=294, ) @@ -270,8 +276,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=234, - serialized_end=382, + serialized_start=297, + serialized_end=445, ) @@ -297,7 +303,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -315,7 +321,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -333,7 +339,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -345,8 +351,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=385, - serialized_end=603, + serialized_start=448, + serialized_end=681, ) @@ -410,8 +416,8 @@ fields=[], ) ], - serialized_start=605, - serialized_end=669, + serialized_start=683, + serialized_end=747, ) @@ -437,7 +443,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -485,8 +491,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=671, - serialized_end=798, + serialized_start=750, + serialized_end=882, ) @@ -512,7 +518,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -530,7 +536,7 @@ 
containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\004\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -548,7 +554,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\004\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -566,7 +572,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\004\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -584,7 +590,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -602,7 +608,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\004\340A\001"), file=DESCRIPTOR, ), ], @@ -614,8 +620,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=801, - serialized_end=1000, + serialized_start=885, + serialized_end=1126, ) @@ -653,8 +659,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1002, - serialized_end=1051, + serialized_start=1128, + serialized_end=1177, ) _LISTVOICESRESPONSE.fields_by_name["voices"].message_type = _VOICE @@ -699,7 +705,7 @@ Attributes: language_code: - Optional (but recommended) `BCP-47 `__ language tag. If specified, the ListVoices call will only return voices that can be used to synthesize this language\_code. E.g. when specifying "en- @@ -821,30 +827,30 @@ Attributes: language_code: - The language (and optionally also the region) of the voice - expressed as a `BCP-47 `__ language tag, e.g. "en-US". - Required. This should not include a script tag (e.g. use "cmn- - cn" rather than "cmn-Hant-cn"), because the script will be - inferred from the input provided in the SynthesisInput. 
The - TTS service will use this parameter to help choose an - appropriate voice. Note that the TTS service may choose a - voice with a slightly different language code than the one - selected; it may substitute a different region (e.g. using en- - US rather than en-CA if there isn't a Canadian voice - available), or even a different language, e.g. using "nb" - (Norwegian Bokmal) instead of "no" (Norwegian)". + This should not include a script tag (e.g. use "cmn-cn" rather + than "cmn-Hant-cn"), because the script will be inferred from + the input provided in the SynthesisInput. The TTS service will + use this parameter to help choose an appropriate voice. Note + that the TTS service may choose a voice with a slightly + different language code than the one selected; it may + substitute a different region (e.g. using en-US rather than + en-CA if there isn't a Canadian voice available), or even a + different language, e.g. using "nb" (Norwegian Bokmal) instead + of "no" (Norwegian)". name: - The name of the voice. Optional; if not set, the service will - choose a voice based on the other parameters such as - language\_code and gender. + The name of the voice. If not set, the service will choose a + voice based on the other parameters such as language\_code and + gender. ssml_gender: - The preferred gender of the voice. Optional; if not set, the - service will choose a voice based on the other parameters such - as language\_code and name. Note that this is only a - preference, not requirement; if a voice of the appropriate - gender is not available, the synthesizer should substitute a - voice with a different gender rather than failing the request. + The preferred gender of the voice. If not set, the service + will choose a voice based on the other parameters such as + language\_code and name. 
Note that this is only a preference, + not requirement; if a voice of the appropriate gender is not + available, the synthesizer should substitute a voice with a + different gender rather than failing the request. """, # @@protoc_insertion_point(class_scope:google.cloud.texttospeech.v1.VoiceSelectionParams) ), @@ -862,43 +868,44 @@ Attributes: audio_encoding: - Required. The format of the requested audio byte stream. + Required. The format of the audio byte stream. speaking_rate: - Optional speaking rate/speed, in the range [0.25, 4.0]. 1.0 is - the normal native speed supported by the specific voice. 2.0 - is twice as fast, and 0.5 is half as fast. If unset(0.0), - defaults to the native 1.0 speed. Any other values < 0.25 or > - 4.0 will return an error. + Optional. Input only. Speaking rate/speed, in the range [0.25, + 4.0]. 1.0 is the normal native speed supported by the specific + voice. 2.0 is twice as fast, and 0.5 is half as fast. If + unset(0.0), defaults to the native 1.0 speed. Any other values + < 0.25 or > 4.0 will return an error. pitch: - Optional speaking pitch, in the range [-20.0, 20.0]. 20 means - increase 20 semitones from the original pitch. -20 means - decrease 20 semitones from the original pitch. + Optional. Input only. Speaking pitch, in the range [-20.0, + 20.0]. 20 means increase 20 semitones from the original pitch. + -20 means decrease 20 semitones from the original pitch. volume_gain_db: - Optional volume gain (in dB) of the normal native volume - supported by the specific voice, in the range [-96.0, 16.0]. - If unset, or set to a value of 0.0 (dB), will play at normal - native signal amplitude. A value of -6.0 (dB) will play at - approximately half the amplitude of the normal native signal - amplitude. A value of +6.0 (dB) will play at approximately - twice the amplitude of the normal native signal amplitude. 
- Strongly recommend not to exceed +10 (dB) as there's usually - no effective increase in loudness for any value greater than - that. + Optional. Input only. Volume gain (in dB) of the normal native + volume supported by the specific voice, in the range [-96.0, + 16.0]. If unset, or set to a value of 0.0 (dB), will play at + normal native signal amplitude. A value of -6.0 (dB) will play + at approximately half the amplitude of the normal native + signal amplitude. A value of +6.0 (dB) will play at + approximately twice the amplitude of the normal native signal + amplitude. Strongly recommend not to exceed +10 (dB) as + there's usually no effective increase in loudness for any + value greater than that. sample_rate_hertz: - The synthesis sample rate (in hertz) for this audio. Optional. - If this is different from the voice's natural sample rate, - then the synthesizer will honor this request by converting to - the desired sample rate (which might result in worse audio + Optional. The synthesis sample rate (in hertz) for this audio. + When this is specified in SynthesizeSpeechRequest, if this is + different from the voice's natural sample rate, then the + synthesizer will honor this request by converting to the + desired sample rate (which might result in worse audio quality), unless the specified sample rate is not supported for the encoding chosen, in which case it will fail the request and return [google.rpc.Code.INVALID\_ARGUMENT][]. effects_profile_id: - An identifier which selects 'audio effects' profiles that are - applied on (post synthesized) text to speech. Effects are - applied on top of each other in the order they are given. See - `audio-profiles `__ for current supported profile - ids. + Optional. Input only. An identifier which selects 'audio + effects' profiles that are applied on (post synthesized) text + to speech. Effects are applied on top of each other in the + order they are given. See `audio profiles + `__ for current supported profile ids. 
""", # @@protoc_insertion_point(class_scope:google.cloud.texttospeech.v1.AudioConfig) ), @@ -917,10 +924,11 @@ Attributes: audio_content: The audio data bytes encoded as specified in the request, - including the header (For LINEAR16 audio, we include the WAV - header). Note: as with all bytes fields, protobuffers use a - pure binary representation, whereas JSON representations use - base64. + including the header for encodings that are wrapped in + containers (e.g. MP3, OGG\_OPUS). For LINEAR16 audio, we + include the WAV header. Note: as with all bytes fields, + protobuffers use a pure binary representation, whereas JSON + representations use base64. """, # @@protoc_insertion_point(class_scope:google.cloud.texttospeech.v1.SynthesizeSpeechResponse) ), @@ -929,15 +937,28 @@ DESCRIPTOR._options = None +_LISTVOICESREQUEST.fields_by_name["language_code"]._options = None +_SYNTHESIZESPEECHREQUEST.fields_by_name["input"]._options = None +_SYNTHESIZESPEECHREQUEST.fields_by_name["voice"]._options = None +_SYNTHESIZESPEECHREQUEST.fields_by_name["audio_config"]._options = None +_VOICESELECTIONPARAMS.fields_by_name["language_code"]._options = None +_AUDIOCONFIG.fields_by_name["audio_encoding"]._options = None +_AUDIOCONFIG.fields_by_name["speaking_rate"]._options = None +_AUDIOCONFIG.fields_by_name["pitch"]._options = None +_AUDIOCONFIG.fields_by_name["volume_gain_db"]._options = None +_AUDIOCONFIG.fields_by_name["sample_rate_hertz"]._options = None +_AUDIOCONFIG.fields_by_name["effects_profile_id"]._options = None _TEXTTOSPEECH = _descriptor.ServiceDescriptor( name="TextToSpeech", full_name="google.cloud.texttospeech.v1.TextToSpeech", file=DESCRIPTOR, index=0, - serialized_options=None, - serialized_start=1229, - serialized_end=1541, + serialized_options=_b( + "\312A\033texttospeech.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" + ), + serialized_start=1355, + serialized_end=1791, methods=[ _descriptor.MethodDescriptor( name="ListVoices", @@ -946,7 
+967,9 @@ containing_service=None, input_type=_LISTVOICESREQUEST, output_type=_LISTVOICESRESPONSE, - serialized_options=_b("\202\323\344\223\002\014\022\n/v1/voices"), + serialized_options=_b( + "\202\323\344\223\002\014\022\n/v1/voices\332A\rlanguage_code" + ), ), _descriptor.MethodDescriptor( name="SynthesizeSpeech", @@ -956,7 +979,7 @@ input_type=_SYNTHESIZESPEECHREQUEST, output_type=_SYNTHESIZESPEECHRESPONSE, serialized_options=_b( - '\202\323\344\223\002\030"\023/v1/text:synthesize:\001*' + '\202\323\344\223\002\030"\023/v1/text:synthesize:\001*\332A\030input,voice,audio_config' ), ), ], diff --git a/texttospeech/google/cloud/texttospeech_v1beta1/gapic/enums.py b/texttospeech/google/cloud/texttospeech_v1beta1/gapic/enums.py index 97736e3a0080..3ac941cd65d6 100644 --- a/texttospeech/google/cloud/texttospeech_v1beta1/gapic/enums.py +++ b/texttospeech/google/cloud/texttospeech_v1beta1/gapic/enums.py @@ -28,7 +28,7 @@ class AudioEncoding(enum.IntEnum): AUDIO_ENCODING_UNSPECIFIED (int): Not specified. Will return result ``google.rpc.Code.INVALID_ARGUMENT``. LINEAR16 (int): Uncompressed 16-bit signed little-endian samples (Linear PCM). Audio content returned as LINEAR16 also contains a WAV header. - MP3 (int): MP3 audio. + MP3 (int): MP3 audio at 32kbps. OGG_OPUS (int): Opus encoded audio wrapped in an ogg container. The result will be a file which can be played natively on Android, and in browsers (at least Chrome and Firefox). The quality of the encoding is considerably higher diff --git a/texttospeech/google/cloud/texttospeech_v1beta1/gapic/text_to_speech_client.py b/texttospeech/google/cloud/texttospeech_v1beta1/gapic/text_to_speech_client.py index cf7b3e47dfe7..dcafa0fad9be 100644 --- a/texttospeech/google/cloud/texttospeech_v1beta1/gapic/text_to_speech_client.py +++ b/texttospeech/google/cloud/texttospeech_v1beta1/gapic/text_to_speech_client.py @@ -192,7 +192,7 @@ def list_voices( metadata=None, ): """ - Returns a list of ``Voice`` supported for synthesis. 
+ Returns a list of Voice supported for synthesis. Example: >>> from google.cloud import texttospeech_v1beta1 @@ -202,7 +202,7 @@ def list_voices( >>> response = client.list_voices() Args: - language_code (str): Optional (but recommended) + language_code (str): Optional. Recommended. `BCP-47 `__ language tag. If specified, the ListVoices call will only return voices that can be used to synthesize this language\_code. E.g. when specifying "en-NZ", diff --git a/texttospeech/google/cloud/texttospeech_v1beta1/gapic/text_to_speech_client_config.py b/texttospeech/google/cloud/texttospeech_v1beta1/gapic/text_to_speech_client_config.py index bec87d4bd398..0463d0e9e778 100644 --- a/texttospeech/google/cloud/texttospeech_v1beta1/gapic/text_to_speech_client_config.py +++ b/texttospeech/google/cloud/texttospeech_v1beta1/gapic/text_to_speech_client_config.py @@ -18,13 +18,13 @@ }, "methods": { "ListVoices": { - "timeout_millis": 30000, + "timeout_millis": 60000, "retry_codes_name": "idempotent", "retry_params_name": "default", }, "SynthesizeSpeech": { "timeout_millis": 60000, - "retry_codes_name": "idempotent", + "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, }, diff --git a/texttospeech/google/cloud/texttospeech_v1beta1/gapic/transports/text_to_speech_grpc_transport.py b/texttospeech/google/cloud/texttospeech_v1beta1/gapic/transports/text_to_speech_grpc_transport.py index f2c95085735d..80b403a14bf8 100644 --- a/texttospeech/google/cloud/texttospeech_v1beta1/gapic/transports/text_to_speech_grpc_transport.py +++ b/texttospeech/google/cloud/texttospeech_v1beta1/gapic/transports/text_to_speech_grpc_transport.py @@ -111,7 +111,7 @@ def channel(self): def list_voices(self): """Return the gRPC stub for :meth:`TextToSpeechClient.list_voices`. - Returns a list of ``Voice`` supported for synthesis. + Returns a list of Voice supported for synthesis. 
Returns: Callable: A callable which accepts the appropriate diff --git a/texttospeech/google/cloud/texttospeech_v1beta1/proto/cloud_tts.proto b/texttospeech/google/cloud/texttospeech_v1beta1/proto/cloud_tts.proto index 791499c4dda0..443ff6d56f61 100644 --- a/texttospeech/google/cloud/texttospeech_v1beta1/proto/cloud_tts.proto +++ b/texttospeech/google/cloud/texttospeech_v1beta1/proto/cloud_tts.proto @@ -1,4 +1,4 @@ -// Copyright 2018 Google Inc. +// Copyright 2019 Google LLC. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -11,43 +11,51 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. +// syntax = "proto3"; package google.cloud.texttospeech.v1beta1; import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; option cc_enable_arenas = true; +option csharp_namespace = "Google.Cloud.TextToSpeech.V1beta1"; option go_package = "google.golang.org/genproto/googleapis/cloud/texttospeech/v1beta1;texttospeech"; option java_multiple_files = true; option java_outer_classname = "TextToSpeechProto"; option java_package = "com.google.cloud.texttospeech.v1beta1"; +option php_namespace = "Google\\Cloud\\TextToSpeech\\V1beta1"; // Service that implements Google Cloud Text-to-Speech API. service TextToSpeech { - // Returns a list of [Voice][google.cloud.texttospeech.v1beta1.Voice] - // supported for synthesis. + option (google.api.default_host) = "texttospeech.googleapis.com"; + option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; + + // Returns a list of Voice supported for synthesis. 
rpc ListVoices(ListVoicesRequest) returns (ListVoicesResponse) { option (google.api.http) = { get: "/v1beta1/voices" }; + option (google.api.method_signature) = "language_code"; } // Synthesizes speech synchronously: receive results after all text input // has been processed. - rpc SynthesizeSpeech(SynthesizeSpeechRequest) - returns (SynthesizeSpeechResponse) { + rpc SynthesizeSpeech(SynthesizeSpeechRequest) returns (SynthesizeSpeechResponse) { option (google.api.http) = { post: "/v1beta1/text:synthesize" body: "*" }; + option (google.api.method_signature) = "input,voice,audio_config"; } } // The top-level message sent by the client for the `ListVoices` method. message ListVoicesRequest { - // Optional (but recommended) + // Optional. Recommended. // [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tag. If // specified, the ListVoices call will only return voices that can be used to // synthesize this language_code. E.g. when specifying "en-NZ", you will get @@ -55,7 +63,47 @@ message ListVoicesRequest { // "no-*" (Norwegian) and "nb-*" (Norwegian Bokmal) voices; specifying "zh" // will also get supported "cmn-*" voices; specifying "zh-hk" will also get // supported "yue-*" voices. - string language_code = 1; + string language_code = 1 [(google.api.field_behavior) = OPTIONAL]; +} + +// Gender of the voice as described in +// [SSML voice element](https://www.w3.org/TR/speech-synthesis11/#edef_voice). +enum SsmlVoiceGender { + // An unspecified gender. + // In VoiceSelectionParams, this means that the client doesn't care which + // gender the selected voice will have. In the Voice field of + // ListVoicesResponse, this may mean that the voice doesn't fit any of the + // other categories in this enum, or that the gender of the voice isn't known. + SSML_VOICE_GENDER_UNSPECIFIED = 0; + + // A male voice. + MALE = 1; + + // A female voice. + FEMALE = 2; + + // A gender-neutral voice. + NEUTRAL = 3; +} + +// Configuration to set up audio encoder. 
The encoding determines the output +// audio format that we'd like. +enum AudioEncoding { + // Not specified. Will return result [google.rpc.Code.INVALID_ARGUMENT][]. + AUDIO_ENCODING_UNSPECIFIED = 0; + + // Uncompressed 16-bit signed little-endian samples (Linear PCM). + // Audio content returned as LINEAR16 also contains a WAV header. + LINEAR16 = 1; + + // MP3 audio at 32kbps. + MP3 = 2; + + // Opus encoded audio wrapped in an ogg container. The result will be a + // file which can be played natively on Android, and in browsers (at least + // Chrome and Firefox). The quality of the encoding is considerably higher + // than MP3 while using approximately the same bitrate. + OGG_OPUS = 3; } // The message returned to the client by the `ListVoices` method. @@ -84,13 +132,13 @@ message Voice { // The top-level message sent by the client for the `SynthesizeSpeech` method. message SynthesizeSpeechRequest { // Required. The Synthesizer requires either plain text or SSML as input. - SynthesisInput input = 1; + SynthesisInput input = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The desired voice of the synthesized audio. - VoiceSelectionParams voice = 2; + VoiceSelectionParams voice = 2 [(google.api.field_behavior) = REQUIRED]; // Required. The configuration of the synthesized audio. - AudioConfig audio_config = 3; + AudioConfig audio_config = 3 [(google.api.field_behavior) = REQUIRED]; } // Contains text input to be synthesized. Either `text` or `ssml` must be @@ -113,9 +161,9 @@ message SynthesisInput { // Description of which voice to use for a synthesis request. message VoiceSelectionParams { - // The language (and optionally also the region) of the voice expressed as a + // Required. The language (and potentially also the region) of the voice expressed as a // [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tag, e.g. - // "en-US". Required. This should not include a script tag (e.g. use + // "en-US". 
This should not include a script tag (e.g. use // "cmn-cn" rather than "cmn-Hant-cn"), because the script will be inferred // from the input provided in the SynthesisInput. The TTS service // will use this parameter to help choose an appropriate voice. Note that @@ -124,13 +172,13 @@ message VoiceSelectionParams { // (e.g. using en-US rather than en-CA if there isn't a Canadian voice // available), or even a different language, e.g. using "nb" (Norwegian // Bokmal) instead of "no" (Norwegian)". - string language_code = 1; + string language_code = 1 [(google.api.field_behavior) = REQUIRED]; - // The name of the voice. Optional; if not set, the service will choose a + // The name of the voice. If not set, the service will choose a // voice based on the other parameters such as language_code and gender. string name = 2; - // The preferred gender of the voice. Optional; if not set, the service will + // The preferred gender of the voice. If not set, the service will // choose a voice based on the other parameters such as language_code and // name. Note that this is only a preference, not requirement; if a // voice of the appropriate gender is not available, the synthesizer should @@ -140,89 +188,66 @@ message VoiceSelectionParams { // Description of audio data to be synthesized. message AudioConfig { - // Required. The format of the requested audio byte stream. - AudioEncoding audio_encoding = 1; - - // Optional speaking rate/speed, in the range [0.25, 4.0]. 1.0 is the normal - // native speed supported by the specific voice. 2.0 is twice as fast, and - // 0.5 is half as fast. If unset(0.0), defaults to the native 1.0 speed. Any - // other values < 0.25 or > 4.0 will return an error. - double speaking_rate = 2; - - // Optional speaking pitch, in the range [-20.0, 20.0]. 20 means increase 20 - // semitones from the original pitch. -20 means decrease 20 semitones from the - // original pitch. 
- double pitch = 3; - - // Optional volume gain (in dB) of the normal native volume supported by the - // specific voice, in the range [-96.0, 16.0]. If unset, or set to a value of - // 0.0 (dB), will play at normal native signal amplitude. A value of -6.0 (dB) - // will play at approximately half the amplitude of the normal native signal - // amplitude. A value of +6.0 (dB) will play at approximately twice the - // amplitude of the normal native signal amplitude. Strongly recommend not to - // exceed +10 (dB) as there's usually no effective increase in loudness for - // any value greater than that. - double volume_gain_db = 4; - - // The synthesis sample rate (in hertz) for this audio. Optional. If this is - // different from the voice's natural sample rate, then the synthesizer will - // honor this request by converting to the desired sample rate (which might - // result in worse audio quality), unless the specified sample rate is not - // supported for the encoding chosen, in which case it will fail the request - // and return [google.rpc.Code.INVALID_ARGUMENT][]. - int32 sample_rate_hertz = 5; - - // An identifier which selects 'audio effects' profiles that are applied on - // (post synthesized) text to speech. - // Effects are applied on top of each other in the order they are given. - repeated string effects_profile_id = 6; + // Required. The format of the audio byte stream. + AudioEncoding audio_encoding = 1 [(google.api.field_behavior) = REQUIRED]; + + // Optional. Input only. Speaking rate/speed, in the range [0.25, 4.0]. 1.0 is + // the normal native speed supported by the specific voice. 2.0 is twice as + // fast, and 0.5 is half as fast. If unset(0.0), defaults to the native 1.0 + // speed. Any other values < 0.25 or > 4.0 will return an error. + double speaking_rate = 2 [ + (google.api.field_behavior) = INPUT_ONLY, + (google.api.field_behavior) = OPTIONAL + ]; + + // Optional. Input only. Speaking pitch, in the range [-20.0, 20.0]. 
20 means + // increase 20 semitones from the original pitch. -20 means decrease 20 + // semitones from the original pitch. + double pitch = 3 [ + (google.api.field_behavior) = INPUT_ONLY, + (google.api.field_behavior) = OPTIONAL + ]; + + // Optional. Input only. Volume gain (in dB) of the normal native volume + // supported by the specific voice, in the range [-96.0, 16.0]. If unset, or + // set to a value of 0.0 (dB), will play at normal native signal amplitude. A + // value of -6.0 (dB) will play at approximately half the amplitude of the + // normal native signal amplitude. A value of +6.0 (dB) will play at + // approximately twice the amplitude of the normal native signal amplitude. + // Strongly recommend not to exceed +10 (dB) as there's usually no effective + // increase in loudness for any value greater than that. + double volume_gain_db = 4 [ + (google.api.field_behavior) = INPUT_ONLY, + (google.api.field_behavior) = OPTIONAL + ]; + + // Optional. The synthesis sample rate (in hertz) for this audio. When this is + // specified in SynthesizeSpeechRequest, if this is different from the voice's + // natural sample rate, then the synthesizer will honor this request by + // converting to the desired sample rate (which might result in worse audio + // quality), unless the specified sample rate is not supported for the + // encoding chosen, in which case it will fail the request and return + // [google.rpc.Code.INVALID_ARGUMENT][]. + int32 sample_rate_hertz = 5 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Input only. An identifier which selects 'audio effects' profiles + // that are applied on (post synthesized) text to speech. Effects are applied + // on top of each other in the order they are given. See + // [audio + // profiles](https://cloud.google.com/text-to-speech/docs/audio-profiles) for + // current supported profile ids. 
+ repeated string effects_profile_id = 6 [ + (google.api.field_behavior) = INPUT_ONLY, + (google.api.field_behavior) = OPTIONAL + ]; } // The message returned to the client by the `SynthesizeSpeech` method. message SynthesizeSpeechResponse { // The audio data bytes encoded as specified in the request, including the - // header (For LINEAR16 audio, we include the WAV header). Note: as + // header for encodings that are wrapped in containers (e.g. MP3, OGG_OPUS). + // For LINEAR16 audio, we include the WAV header. Note: as // with all bytes fields, protobuffers use a pure binary representation, // whereas JSON representations use base64. bytes audio_content = 1; } - -// Gender of the voice as described in -// [SSML voice element](https://www.w3.org/TR/speech-synthesis11/#edef_voice). -enum SsmlVoiceGender { - // An unspecified gender. - // In VoiceSelectionParams, this means that the client doesn't care which - // gender the selected voice will have. In the Voice field of - // ListVoicesResponse, this may mean that the voice doesn't fit any of the - // other categories in this enum, or that the gender of the voice isn't known. - SSML_VOICE_GENDER_UNSPECIFIED = 0; - - // A male voice. - MALE = 1; - - // A female voice. - FEMALE = 2; - - // A gender-neutral voice. - NEUTRAL = 3; -} - -// Configuration to set up audio encoder. The encoding determines the output -// audio format that we'd like. -enum AudioEncoding { - // Not specified. Will return result [google.rpc.Code.INVALID_ARGUMENT][]. - AUDIO_ENCODING_UNSPECIFIED = 0; - - // Uncompressed 16-bit signed little-endian samples (Linear PCM). - // Audio content returned as LINEAR16 also contains a WAV header. - LINEAR16 = 1; - - // MP3 audio. - MP3 = 2; - - // Opus encoded audio wrapped in an ogg container. The result will be a - // file which can be played natively on Android, and in browsers (at least - // Chrome and Firefox). 
The quality of the encoding is considerably higher - // than MP3 while using approximately the same bitrate. - OGG_OPUS = 3; -} diff --git a/texttospeech/google/cloud/texttospeech_v1beta1/proto/cloud_tts_pb2.py b/texttospeech/google/cloud/texttospeech_v1beta1/proto/cloud_tts_pb2.py index 11080df95fbd..17dc91768978 100644 --- a/texttospeech/google/cloud/texttospeech_v1beta1/proto/cloud_tts_pb2.py +++ b/texttospeech/google/cloud/texttospeech_v1beta1/proto/cloud_tts_pb2.py @@ -17,6 +17,8 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 DESCRIPTOR = _descriptor.FileDescriptor( @@ -24,12 +26,16 @@ package="google.cloud.texttospeech.v1beta1", syntax="proto3", serialized_options=_b( - "\n%com.google.cloud.texttospeech.v1beta1B\021TextToSpeechProtoP\001ZMgoogle.golang.org/genproto/googleapis/cloud/texttospeech/v1beta1;texttospeech\370\001\001" + "\n%com.google.cloud.texttospeech.v1beta1B\021TextToSpeechProtoP\001ZMgoogle.golang.org/genproto/googleapis/cloud/texttospeech/v1beta1;texttospeech\370\001\001\252\002!Google.Cloud.TextToSpeech.V1beta1\312\002!Google\\Cloud\\TextToSpeech\\V1beta1" ), serialized_pb=_b( - '\n7google/cloud/texttospeech_v1beta1/proto/cloud_tts.proto\x12!google.cloud.texttospeech.v1beta1\x1a\x1cgoogle/api/annotations.proto"*\n\x11ListVoicesRequest\x12\x15\n\rlanguage_code\x18\x01 \x01(\t"N\n\x12ListVoicesResponse\x12\x38\n\x06voices\x18\x01 \x03(\x0b\x32(.google.cloud.texttospeech.v1beta1.Voice"\x99\x01\n\x05Voice\x12\x16\n\x0elanguage_codes\x18\x01 \x03(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12G\n\x0bssml_gender\x18\x03 \x01(\x0e\x32\x32.google.cloud.texttospeech.v1beta1.SsmlVoiceGender\x12!\n\x19natural_sample_rate_hertz\x18\x04 \x01(\x05"\xe9\x01\n\x17SynthesizeSpeechRequest\x12@\n\x05input\x18\x01 
\x01(\x0b\x32\x31.google.cloud.texttospeech.v1beta1.SynthesisInput\x12\x46\n\x05voice\x18\x02 \x01(\x0b\x32\x37.google.cloud.texttospeech.v1beta1.VoiceSelectionParams\x12\x44\n\x0c\x61udio_config\x18\x03 \x01(\x0b\x32..google.cloud.texttospeech.v1beta1.AudioConfig"@\n\x0eSynthesisInput\x12\x0e\n\x04text\x18\x01 \x01(\tH\x00\x12\x0e\n\x04ssml\x18\x02 \x01(\tH\x00\x42\x0e\n\x0cinput_source"\x84\x01\n\x14VoiceSelectionParams\x12\x15\n\rlanguage_code\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12G\n\x0bssml_gender\x18\x03 \x01(\x0e\x32\x32.google.cloud.texttospeech.v1beta1.SsmlVoiceGender"\xcc\x01\n\x0b\x41udioConfig\x12H\n\x0e\x61udio_encoding\x18\x01 \x01(\x0e\x32\x30.google.cloud.texttospeech.v1beta1.AudioEncoding\x12\x15\n\rspeaking_rate\x18\x02 \x01(\x01\x12\r\n\x05pitch\x18\x03 \x01(\x01\x12\x16\n\x0evolume_gain_db\x18\x04 \x01(\x01\x12\x19\n\x11sample_rate_hertz\x18\x05 \x01(\x05\x12\x1a\n\x12\x65\x66\x66\x65\x63ts_profile_id\x18\x06 \x03(\t"1\n\x18SynthesizeSpeechResponse\x12\x15\n\raudio_content\x18\x01 \x01(\x0c*W\n\x0fSsmlVoiceGender\x12!\n\x1dSSML_VOICE_GENDER_UNSPECIFIED\x10\x00\x12\x08\n\x04MALE\x10\x01\x12\n\n\x06\x46\x45MALE\x10\x02\x12\x0b\n\x07NEUTRAL\x10\x03*T\n\rAudioEncoding\x12\x1e\n\x1a\x41UDIO_ENCODING_UNSPECIFIED\x10\x00\x12\x0c\n\x08LINEAR16\x10\x01\x12\x07\n\x03MP3\x10\x02\x12\x0c\n\x08OGG_OPUS\x10\x03\x32\xd6\x02\n\x0cTextToSpeech\x12\x92\x01\n\nListVoices\x12\x34.google.cloud.texttospeech.v1beta1.ListVoicesRequest\x1a\x35.google.cloud.texttospeech.v1beta1.ListVoicesResponse"\x17\x82\xd3\xe4\x93\x02\x11\x12\x0f/v1beta1/voices\x12\xb0\x01\n\x10SynthesizeSpeech\x12:.google.cloud.texttospeech.v1beta1.SynthesizeSpeechRequest\x1a;.google.cloud.texttospeech.v1beta1.SynthesizeSpeechResponse"#\x82\xd3\xe4\x93\x02\x1d"\x18/v1beta1/text:synthesize:\x01*B\x8e\x01\n%com.google.cloud.texttospeech.v1beta1B\x11TextToSpeechProtoP\x01ZMgoogle.golang.org/genproto/googleapis/cloud/texttospeech/v1beta1;texttospeech\xf8\x01\x01\x62\x06proto3' + 
'\n7google/cloud/texttospeech_v1beta1/proto/cloud_tts.proto\x12!google.cloud.texttospeech.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto"/\n\x11ListVoicesRequest\x12\x1a\n\rlanguage_code\x18\x01 \x01(\tB\x03\xe0\x41\x01"N\n\x12ListVoicesResponse\x12\x38\n\x06voices\x18\x01 \x03(\x0b\x32(.google.cloud.texttospeech.v1beta1.Voice"\x99\x01\n\x05Voice\x12\x16\n\x0elanguage_codes\x18\x01 \x03(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12G\n\x0bssml_gender\x18\x03 \x01(\x0e\x32\x32.google.cloud.texttospeech.v1beta1.SsmlVoiceGender\x12!\n\x19natural_sample_rate_hertz\x18\x04 \x01(\x05"\xf8\x01\n\x17SynthesizeSpeechRequest\x12\x45\n\x05input\x18\x01 \x01(\x0b\x32\x31.google.cloud.texttospeech.v1beta1.SynthesisInputB\x03\xe0\x41\x02\x12K\n\x05voice\x18\x02 \x01(\x0b\x32\x37.google.cloud.texttospeech.v1beta1.VoiceSelectionParamsB\x03\xe0\x41\x02\x12I\n\x0c\x61udio_config\x18\x03 \x01(\x0b\x32..google.cloud.texttospeech.v1beta1.AudioConfigB\x03\xe0\x41\x02"@\n\x0eSynthesisInput\x12\x0e\n\x04text\x18\x01 \x01(\tH\x00\x12\x0e\n\x04ssml\x18\x02 \x01(\tH\x00\x42\x0e\n\x0cinput_source"\x89\x01\n\x14VoiceSelectionParams\x12\x1a\n\rlanguage_code\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x0c\n\x04name\x18\x02 \x01(\t\x12G\n\x0bssml_gender\x18\x03 \x01(\x0e\x32\x32.google.cloud.texttospeech.v1beta1.SsmlVoiceGender"\xf6\x01\n\x0b\x41udioConfig\x12M\n\x0e\x61udio_encoding\x18\x01 \x01(\x0e\x32\x30.google.cloud.texttospeech.v1beta1.AudioEncodingB\x03\xe0\x41\x02\x12\x1d\n\rspeaking_rate\x18\x02 \x01(\x01\x42\x06\xe0\x41\x04\xe0\x41\x01\x12\x15\n\x05pitch\x18\x03 \x01(\x01\x42\x06\xe0\x41\x04\xe0\x41\x01\x12\x1e\n\x0evolume_gain_db\x18\x04 \x01(\x01\x42\x06\xe0\x41\x04\xe0\x41\x01\x12\x1e\n\x11sample_rate_hertz\x18\x05 \x01(\x05\x42\x03\xe0\x41\x01\x12"\n\x12\x65\x66\x66\x65\x63ts_profile_id\x18\x06 \x03(\tB\x06\xe0\x41\x04\xe0\x41\x01"1\n\x18SynthesizeSpeechResponse\x12\x15\n\raudio_content\x18\x01 
\x01(\x0c*W\n\x0fSsmlVoiceGender\x12!\n\x1dSSML_VOICE_GENDER_UNSPECIFIED\x10\x00\x12\x08\n\x04MALE\x10\x01\x12\n\n\x06\x46\x45MALE\x10\x02\x12\x0b\n\x07NEUTRAL\x10\x03*T\n\rAudioEncoding\x12\x1e\n\x1a\x41UDIO_ENCODING_UNSPECIFIED\x10\x00\x12\x0c\n\x08LINEAR16\x10\x01\x12\x07\n\x03MP3\x10\x02\x12\x0c\n\x08OGG_OPUS\x10\x03\x32\xd2\x03\n\x0cTextToSpeech\x12\xa2\x01\n\nListVoices\x12\x34.google.cloud.texttospeech.v1beta1.ListVoicesRequest\x1a\x35.google.cloud.texttospeech.v1beta1.ListVoicesResponse"\'\x82\xd3\xe4\x93\x02\x11\x12\x0f/v1beta1/voices\xda\x41\rlanguage_code\x12\xcb\x01\n\x10SynthesizeSpeech\x12:.google.cloud.texttospeech.v1beta1.SynthesizeSpeechRequest\x1a;.google.cloud.texttospeech.v1beta1.SynthesizeSpeechResponse">\x82\xd3\xe4\x93\x02\x1d"\x18/v1beta1/text:synthesize:\x01*\xda\x41\x18input,voice,audio_config\x1aO\xca\x41\x1btexttospeech.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB\xd6\x01\n%com.google.cloud.texttospeech.v1beta1B\x11TextToSpeechProtoP\x01ZMgoogle.golang.org/genproto/googleapis/cloud/texttospeech/v1beta1;texttospeech\xf8\x01\x01\xaa\x02!Google.Cloud.TextToSpeech.V1beta1\xca\x02!Google\\Cloud\\TextToSpeech\\V1beta1b\x06proto3' ), - dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR], + dependencies=[ + google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, + ], ) _SSMLVOICEGENDER = _descriptor.EnumDescriptor( @@ -57,8 +63,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=1099, - serialized_end=1186, + serialized_start=1224, + serialized_end=1311, ) _sym_db.RegisterEnumDescriptor(_SSMLVOICEGENDER) @@ -88,8 +94,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=1188, - serialized_end=1272, + serialized_start=1313, + serialized_end=1397, ) _sym_db.RegisterEnumDescriptor(_AUDIOENCODING) @@ -126,7 +132,7 @@ containing_type=None, is_extension=False, extension_scope=None, - 
serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ) ], @@ -138,8 +144,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=124, - serialized_end=166, + serialized_start=182, + serialized_end=229, ) @@ -177,8 +183,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=168, - serialized_end=246, + serialized_start=231, + serialized_end=309, ) @@ -270,8 +276,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=249, - serialized_end=402, + serialized_start=312, + serialized_end=465, ) @@ -297,7 +303,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -315,7 +321,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -333,7 +339,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -345,8 +351,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=405, - serialized_end=638, + serialized_start=468, + serialized_end=716, ) @@ -410,8 +416,8 @@ fields=[], ) ], - serialized_start=640, - serialized_end=704, + serialized_start=718, + serialized_end=782, ) @@ -437,7 +443,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -485,8 +491,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=707, - serialized_end=839, + serialized_start=785, + serialized_end=922, ) @@ -512,7 +518,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ 
-530,7 +536,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\004\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -548,7 +554,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\004\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -566,7 +572,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\004\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -584,7 +590,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -602,7 +608,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\004\340A\001"), file=DESCRIPTOR, ), ], @@ -614,8 +620,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=842, - serialized_end=1046, + serialized_start=925, + serialized_end=1171, ) @@ -653,8 +659,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1048, - serialized_end=1097, + serialized_start=1173, + serialized_end=1222, ) _LISTVOICESRESPONSE.fields_by_name["voices"].message_type = _VOICE @@ -699,7 +705,7 @@ Attributes: language_code: - Optional (but recommended) `BCP-47 `__ language tag. If specified, the ListVoices call will only return voices that can be used to synthesize this language\_code. E.g. when specifying "en- @@ -821,30 +827,30 @@ Attributes: language_code: - The language (and optionally also the region) of the voice - expressed as a `BCP-47 `__ language tag, e.g. "en-US". - Required. This should not include a script tag (e.g. use "cmn- - cn" rather than "cmn-Hant-cn"), because the script will be - inferred from the input provided in the SynthesisInput. 
The - TTS service will use this parameter to help choose an - appropriate voice. Note that the TTS service may choose a - voice with a slightly different language code than the one - selected; it may substitute a different region (e.g. using en- - US rather than en-CA if there isn't a Canadian voice - available), or even a different language, e.g. using "nb" - (Norwegian Bokmal) instead of "no" (Norwegian)". + This should not include a script tag (e.g. use "cmn-cn" rather + than "cmn-Hant-cn"), because the script will be inferred from + the input provided in the SynthesisInput. The TTS service will + use this parameter to help choose an appropriate voice. Note + that the TTS service may choose a voice with a slightly + different language code than the one selected; it may + substitute a different region (e.g. using en-US rather than + en-CA if there isn't a Canadian voice available), or even a + different language, e.g. using "nb" (Norwegian Bokmal) instead + of "no" (Norwegian)". name: - The name of the voice. Optional; if not set, the service will - choose a voice based on the other parameters such as - language\_code and gender. + The name of the voice. If not set, the service will choose a + voice based on the other parameters such as language\_code and + gender. ssml_gender: - The preferred gender of the voice. Optional; if not set, the - service will choose a voice based on the other parameters such - as language\_code and name. Note that this is only a - preference, not requirement; if a voice of the appropriate - gender is not available, the synthesizer should substitute a - voice with a different gender rather than failing the request. + The preferred gender of the voice. If not set, the service + will choose a voice based on the other parameters such as + language\_code and name. 
Note that this is only a preference, + not requirement; if a voice of the appropriate gender is not + available, the synthesizer should substitute a voice with a + different gender rather than failing the request. """, # @@protoc_insertion_point(class_scope:google.cloud.texttospeech.v1beta1.VoiceSelectionParams) ), @@ -862,40 +868,44 @@ Attributes: audio_encoding: - Required. The format of the requested audio byte stream. + Required. The format of the audio byte stream. speaking_rate: - Optional speaking rate/speed, in the range [0.25, 4.0]. 1.0 is - the normal native speed supported by the specific voice. 2.0 - is twice as fast, and 0.5 is half as fast. If unset(0.0), - defaults to the native 1.0 speed. Any other values < 0.25 or > - 4.0 will return an error. + Optional. Input only. Speaking rate/speed, in the range [0.25, + 4.0]. 1.0 is the normal native speed supported by the specific + voice. 2.0 is twice as fast, and 0.5 is half as fast. If + unset(0.0), defaults to the native 1.0 speed. Any other values + < 0.25 or > 4.0 will return an error. pitch: - Optional speaking pitch, in the range [-20.0, 20.0]. 20 means - increase 20 semitones from the original pitch. -20 means - decrease 20 semitones from the original pitch. + Optional. Input only. Speaking pitch, in the range [-20.0, + 20.0]. 20 means increase 20 semitones from the original pitch. + -20 means decrease 20 semitones from the original pitch. volume_gain_db: - Optional volume gain (in dB) of the normal native volume - supported by the specific voice, in the range [-96.0, 16.0]. - If unset, or set to a value of 0.0 (dB), will play at normal - native signal amplitude. A value of -6.0 (dB) will play at - approximately half the amplitude of the normal native signal - amplitude. A value of +6.0 (dB) will play at approximately - twice the amplitude of the normal native signal amplitude. 
- Strongly recommend not to exceed +10 (dB) as there's usually - no effective increase in loudness for any value greater than - that. + Optional. Input only. Volume gain (in dB) of the normal native + volume supported by the specific voice, in the range [-96.0, + 16.0]. If unset, or set to a value of 0.0 (dB), will play at + normal native signal amplitude. A value of -6.0 (dB) will play + at approximately half the amplitude of the normal native + signal amplitude. A value of +6.0 (dB) will play at + approximately twice the amplitude of the normal native signal + amplitude. Strongly recommend not to exceed +10 (dB) as + there's usually no effective increase in loudness for any + value greater than that. sample_rate_hertz: - The synthesis sample rate (in hertz) for this audio. Optional. - If this is different from the voice's natural sample rate, - then the synthesizer will honor this request by converting to - the desired sample rate (which might result in worse audio + Optional. The synthesis sample rate (in hertz) for this audio. + When this is specified in SynthesizeSpeechRequest, if this is + different from the voice's natural sample rate, then the + synthesizer will honor this request by converting to the + desired sample rate (which might result in worse audio quality), unless the specified sample rate is not supported for the encoding chosen, in which case it will fail the request and return [google.rpc.Code.INVALID\_ARGUMENT][]. effects_profile_id: - An identifier which selects 'audio effects' profiles that are - applied on (post synthesized) text to speech. Effects are - applied on top of each other in the order they are given. + Optional. Input only. An identifier which selects 'audio + effects' profiles that are applied on (post synthesized) text + to speech. Effects are applied on top of each other in the + order they are given. See `audio profiles + `__ for current supported profile ids. 
""", # @@protoc_insertion_point(class_scope:google.cloud.texttospeech.v1beta1.AudioConfig) ), @@ -914,10 +924,11 @@ Attributes: audio_content: The audio data bytes encoded as specified in the request, - including the header (For LINEAR16 audio, we include the WAV - header). Note: as with all bytes fields, protobuffers use a - pure binary representation, whereas JSON representations use - base64. + including the header for encodings that are wrapped in + containers (e.g. MP3, OGG\_OPUS). For LINEAR16 audio, we + include the WAV header. Note: as with all bytes fields, + protobuffers use a pure binary representation, whereas JSON + representations use base64. """, # @@protoc_insertion_point(class_scope:google.cloud.texttospeech.v1beta1.SynthesizeSpeechResponse) ), @@ -926,15 +937,28 @@ DESCRIPTOR._options = None +_LISTVOICESREQUEST.fields_by_name["language_code"]._options = None +_SYNTHESIZESPEECHREQUEST.fields_by_name["input"]._options = None +_SYNTHESIZESPEECHREQUEST.fields_by_name["voice"]._options = None +_SYNTHESIZESPEECHREQUEST.fields_by_name["audio_config"]._options = None +_VOICESELECTIONPARAMS.fields_by_name["language_code"]._options = None +_AUDIOCONFIG.fields_by_name["audio_encoding"]._options = None +_AUDIOCONFIG.fields_by_name["speaking_rate"]._options = None +_AUDIOCONFIG.fields_by_name["pitch"]._options = None +_AUDIOCONFIG.fields_by_name["volume_gain_db"]._options = None +_AUDIOCONFIG.fields_by_name["sample_rate_hertz"]._options = None +_AUDIOCONFIG.fields_by_name["effects_profile_id"]._options = None _TEXTTOSPEECH = _descriptor.ServiceDescriptor( name="TextToSpeech", full_name="google.cloud.texttospeech.v1beta1.TextToSpeech", file=DESCRIPTOR, index=0, - serialized_options=None, - serialized_start=1275, - serialized_end=1617, + serialized_options=_b( + "\312A\033texttospeech.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" + ), + serialized_start=1400, + serialized_end=1866, methods=[ _descriptor.MethodDescriptor( name="ListVoices", 
@@ -943,7 +967,9 @@ containing_service=None, input_type=_LISTVOICESREQUEST, output_type=_LISTVOICESRESPONSE, - serialized_options=_b("\202\323\344\223\002\021\022\017/v1beta1/voices"), + serialized_options=_b( + "\202\323\344\223\002\021\022\017/v1beta1/voices\332A\rlanguage_code" + ), ), _descriptor.MethodDescriptor( name="SynthesizeSpeech", @@ -953,7 +979,7 @@ input_type=_SYNTHESIZESPEECHREQUEST, output_type=_SYNTHESIZESPEECHRESPONSE, serialized_options=_b( - '\202\323\344\223\002\035"\030/v1beta1/text:synthesize:\001*' + '\202\323\344\223\002\035"\030/v1beta1/text:synthesize:\001*\332A\030input,voice,audio_config' ), ), ], diff --git a/texttospeech/google/cloud/texttospeech_v1beta1/proto/cloud_tts_pb2_grpc.py b/texttospeech/google/cloud/texttospeech_v1beta1/proto/cloud_tts_pb2_grpc.py index 3aaa81701692..a469e83d1f46 100644 --- a/texttospeech/google/cloud/texttospeech_v1beta1/proto/cloud_tts_pb2_grpc.py +++ b/texttospeech/google/cloud/texttospeech_v1beta1/proto/cloud_tts_pb2_grpc.py @@ -33,8 +33,7 @@ class TextToSpeechServicer(object): """ def ListVoices(self, request, context): - """Returns a list of [Voice][google.cloud.texttospeech.v1beta1.Voice] - supported for synthesis. + """Returns a list of Voice supported for synthesis. 
""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") diff --git a/texttospeech/synth.metadata b/texttospeech/synth.metadata index 55fda3f44d74..e2cc9a3dd95a 100644 --- a/texttospeech/synth.metadata +++ b/texttospeech/synth.metadata @@ -1,19 +1,18 @@ { - "updateTime": "2019-08-06T12:43:45.265950Z", + "updateTime": "2019-09-10T12:35:21.962178Z", "sources": [ { "generator": { "name": "artman", - "version": "0.32.1", - "dockerImage": "googleapis/artman@sha256:a684d40ba9a4e15946f5f2ca6b4bd9fe301192f522e9de4fff622118775f309b" + "version": "0.36.2", + "dockerImage": "googleapis/artman@sha256:0e6f3a668cd68afc768ecbe08817cf6e56a0e64fcbdb1c58c3b97492d12418a1" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "e699b0cba64ffddfae39633417180f1f65875896", - "internalRef": "261759677" + "sha": "26e189ad03ba63591fb26eecb6aaade7ad39f57a" } }, { diff --git a/texttospeech/tests/system/v1/test_system_tts_v1.py b/texttospeech/tests/system/v1/test_system_tts_v1.py new file mode 100644 index 000000000000..4d90bae50427 --- /dev/null +++ b/texttospeech/tests/system/v1/test_system_tts_v1.py @@ -0,0 +1,38 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from google.cloud import texttospeech_v1 + + +class TestSystemSpeech(object): + def test_synthesize_speech(self): + client = texttospeech_v1.TextToSpeechClient() + + synthesis_input = texttospeech_v1.types.SynthesisInput(text="Hello, World!") + voice = texttospeech_v1.types.VoiceSelectionParams( + language_code="en-US", + ssml_gender=texttospeech_v1.enums.SsmlVoiceGender.NEUTRAL, + ) + audio_config = texttospeech_v1.types.AudioConfig( + audio_encoding=texttospeech_v1.enums.AudioEncoding.MP3 + ) + + response = client.synthesize_speech(synthesis_input, voice, audio_config) + assert response.audio_content is not None + + def test_list_voices(self): + client = texttospeech_v1.TextToSpeechClient() + + voices = client.list_voices() + assert len(voices.voices) > 0 diff --git a/texttospeech/tests/system/v1beta1/test_system_tts_v1beta1.py b/texttospeech/tests/system/v1beta1/test_system_tts_v1beta1.py new file mode 100644 index 000000000000..c6058acf27a0 --- /dev/null +++ b/texttospeech/tests/system/v1beta1/test_system_tts_v1beta1.py @@ -0,0 +1,40 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from google.cloud import texttospeech_v1beta1 + + +class TestSystemSpeech(object): + def test_synthesize_speech(self): + client = texttospeech_v1beta1.TextToSpeechClient() + + synthesis_input = texttospeech_v1beta1.types.SynthesisInput( + text="Hello, World!" 
+ ) + voice = texttospeech_v1beta1.types.VoiceSelectionParams( + language_code="en-US", + ssml_gender=texttospeech_v1beta1.enums.SsmlVoiceGender.NEUTRAL, + ) + audio_config = texttospeech_v1beta1.types.AudioConfig( + audio_encoding=texttospeech_v1beta1.enums.AudioEncoding.MP3 + ) + + response = client.synthesize_speech(synthesis_input, voice, audio_config) + assert response.audio_content is not None + + def test_list_voices(self): + client = texttospeech_v1beta1.TextToSpeechClient() + + voices = client.list_voices() + assert len(voices.voices) > 0 diff --git a/trace/CHANGELOG.md b/trace/CHANGELOG.md index 4c1344b4b03c..adb6e946901c 100644 --- a/trace/CHANGELOG.md +++ b/trace/CHANGELOG.md @@ -4,6 +4,20 @@ [1]: https://pypi.org/project/google-cloud-trace/#history +## 0.23.0 + +10-15-2019 06:59 PDT + + +### Dependencies +- Pin 'google-cloud-core >= 1.0.3, < 2.0.0dev'. ([#9445](https://github.com/googleapis/google-cloud-python/pull/9445)) + +### Documentation +- Change requests intersphinx url (via synth). ([#9410](https://github.com/googleapis/google-cloud-python/pull/9410)) +- Fix intersphinx reference to requests ([#9294](https://github.com/googleapis/google-cloud-python/pull/9294)) +- Remove CI for gh-pages, use googleapis.dev for `api_core` refs. ([#9085](https://github.com/googleapis/google-cloud-python/pull/9085)) +- Remove compatability badges from READMEs. ([#9035](https://github.com/googleapis/google-cloud-python/pull/9035)) + ## 0.22.1 08-12-2019 13:51 PDT diff --git a/trace/docs/conf.py b/trace/docs/conf.py index 8c915280dd6c..38a225821c83 100644 --- a/trace/docs/conf.py +++ b/trace/docs/conf.py @@ -264,7 +264,7 @@ u"google-cloud-trace Documentation", author, "manual", - ) + ), ] # The name of an image file (relative to this directory) to place at the top of @@ -314,7 +314,7 @@ "google-cloud-trace", "GAPIC library for the {metadata.shortName} v2 service", "APIs", - ) + ), ] # Documents to append as an appendix to all manuals. 
@@ -338,7 +338,7 @@ "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://2.python-requests.org/en/master/", None), + "requests": ("https://requests.kennethreitz.org/en/master/", None), "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } diff --git a/trace/google/cloud/trace/_gapic.py b/trace/google/cloud/trace/_gapic.py index 435d9fcc6c17..00ac94d336d4 100644 --- a/trace/google/cloud/trace/_gapic.py +++ b/trace/google/cloud/trace/_gapic.py @@ -314,6 +314,8 @@ def make_trace_api(client): proper configurations. """ generated = trace_service_client.TraceServiceClient( - credentials=client._credentials, client_info=client._client_info + credentials=client._credentials, + client_info=client._client_info, + client_options=client._client_options, ) return _TraceAPI(generated, client) diff --git a/trace/google/cloud/trace/client.py b/trace/google/cloud/trace/client.py index c4c9d5dd6840..90ddd7134c85 100644 --- a/trace/google/cloud/trace/client.py +++ b/trace/google/cloud/trace/client.py @@ -39,6 +39,9 @@ class Client(ClientWithProject): requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own library or partner tool. + client_options (Union[dict, google.api_core.client_options.ClientOptions]): + Client options used to set user options on the client. API Endpoint + should be set through client_options. 
""" SCOPE = ( @@ -49,9 +52,16 @@ class Client(ClientWithProject): _trace_api = None - def __init__(self, project=None, credentials=None, client_info=_CLIENT_INFO): + def __init__( + self, + project=None, + credentials=None, + client_info=_CLIENT_INFO, + client_options=None, + ): super(Client, self).__init__(project=project, credentials=credentials) self._client_info = client_info + self._client_options = client_options @property def trace_api(self): diff --git a/trace/google/cloud/trace/v1/_gapic.py b/trace/google/cloud/trace/v1/_gapic.py index 365f535eba5c..a5f5b2f7f0ab 100644 --- a/trace/google/cloud/trace/v1/_gapic.py +++ b/trace/google/cloud/trace/v1/_gapic.py @@ -178,7 +178,9 @@ def make_trace_api(client): proper configurations. """ generated = trace_service_client.TraceServiceClient( - credentials=client._credentials, client_info=client._client_info + credentials=client._credentials, + client_info=client._client_info, + client_options=client._client_options, ) return _TraceAPI(generated, client) diff --git a/trace/google/cloud/trace/v1/client.py b/trace/google/cloud/trace/v1/client.py index 266158933294..2b79773078a4 100644 --- a/trace/google/cloud/trace/v1/client.py +++ b/trace/google/cloud/trace/v1/client.py @@ -41,6 +41,9 @@ class Client(ClientWithProject): requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own library or partner tool. + client_options (Union[dict, google.api_core.client_options.ClientOptions]): + Client options used to set user options on the client. API Endpoint + should be set through client_options. 
""" SCOPE = ( @@ -51,9 +54,16 @@ class Client(ClientWithProject): _trace_api = None - def __init__(self, project=None, credentials=None, client_info=_CLIENT_INFO): + def __init__( + self, + project=None, + credentials=None, + client_info=_CLIENT_INFO, + client_options=None, + ): super(Client, self).__init__(project=project, credentials=credentials) self._client_info = client_info + self._client_options = client_options @property def trace_api(self): diff --git a/trace/google/cloud/trace_v1/__init__.py b/trace/google/cloud/trace_v1/__init__.py index ce7863f3616b..5af055f3da5f 100644 --- a/trace/google/cloud/trace_v1/__init__.py +++ b/trace/google/cloud/trace_v1/__init__.py @@ -27,4 +27,8 @@ class TraceServiceClient(trace_service_client.TraceServiceClient): enums = enums -__all__ = ("enums", "types", "TraceServiceClient") +__all__ = ( + "enums", + "types", + "TraceServiceClient", +) diff --git a/trace/google/cloud/trace_v1/gapic/trace_service_client.py b/trace/google/cloud/trace_v1/gapic/trace_service_client.py index 0e27928d6899..394965726520 100644 --- a/trace/google/cloud/trace_v1/gapic/trace_service_client.py +++ b/trace/google/cloud/trace_v1/gapic/trace_service_client.py @@ -39,7 +39,7 @@ from google.protobuf import timestamp_pb2 -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-trace").version +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-trace",).version class TraceServiceClient(object): @@ -165,12 +165,12 @@ def __init__( self.transport = transport else: self.transport = trace_service_grpc_transport.TraceServiceGrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials + address=api_endpoint, channel=channel, credentials=credentials, ) if client_info is None: client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION + gapic_version=_GAPIC_LIBRARY_VERSION, ) else: client_info.gapic_version = _GAPIC_LIBRARY_VERSION @@ -181,7 +181,7 @@ def __init__( # 
(Ordinarily, these are the defaults specified in the `*_config.py` # file next to this one.) self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME] + client_config["interfaces"][self._INTERFACE_NAME], ) # Save a dictionary of cached API call functions. @@ -252,7 +252,7 @@ def patch_traces( client_info=self._client_info, ) - request = trace_pb2.PatchTracesRequest(project_id=project_id, traces=traces) + request = trace_pb2.PatchTracesRequest(project_id=project_id, traces=traces,) if metadata is None: metadata = [] metadata = list(metadata) @@ -327,7 +327,7 @@ def get_trace( client_info=self._client_info, ) - request = trace_pb2.GetTraceRequest(project_id=project_id, trace_id=trace_id) + request = trace_pb2.GetTraceRequest(project_id=project_id, trace_id=trace_id,) return self._inner_api_calls["get_trace"]( request, retry=retry, timeout=timeout, metadata=metadata ) diff --git a/trace/google/cloud/trace_v1/gapic/transports/trace_service_grpc_transport.py b/trace/google/cloud/trace_v1/gapic/transports/trace_service_grpc_transport.py index ee3e1908808b..caab89f084d2 100644 --- a/trace/google/cloud/trace_v1/gapic/transports/trace_service_grpc_transport.py +++ b/trace/google/cloud/trace_v1/gapic/transports/trace_service_grpc_transport.py @@ -57,7 +57,7 @@ def __init__( # exception (channels come with credentials baked in already). if channel is not None and credentials is not None: raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive." + "The `channel` and `credentials` arguments are mutually " "exclusive.", ) # Create the channel. @@ -75,7 +75,9 @@ def __init__( # gRPC uses objects called "stubs" that are bound to the # channel and provide a basic method for each RPC. 
- self._stubs = {"trace_service_stub": trace_pb2_grpc.TraceServiceStub(channel)} + self._stubs = { + "trace_service_stub": trace_pb2_grpc.TraceServiceStub(channel), + } @classmethod def create_channel( diff --git a/trace/google/cloud/trace_v1/proto/trace_pb2.py b/trace/google/cloud/trace_v1/proto/trace_pb2.py index 25ac2a5cf6d9..b38ce92bc436 100644 --- a/trace/google/cloud/trace_v1/proto/trace_pb2.py +++ b/trace/google/cloud/trace_v1/proto/trace_pb2.py @@ -195,7 +195,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -401,8 +401,8 @@ ), ], extensions=[], - nested_types=[_TRACESPAN_LABELSENTRY], - enum_types=[_TRACESPAN_SPANKIND], + nested_types=[_TRACESPAN_LABELSENTRY,], + enum_types=[_TRACESPAN_SPANKIND,], serialized_options=None, is_extendable=False, syntax="proto3", @@ -567,7 +567,7 @@ ], extensions=[], nested_types=[], - enum_types=[_LISTTRACESREQUEST_VIEWTYPE], + enum_types=[_LISTTRACESREQUEST_VIEWTYPE,], serialized_options=None, is_extendable=False, syntax="proto3", diff --git a/trace/google/cloud/trace_v1/types.py b/trace/google/cloud/trace_v1/types.py index 058320e083da..6fa160d2e012 100644 --- a/trace/google/cloud/trace_v1/types.py +++ b/trace/google/cloud/trace_v1/types.py @@ -25,9 +25,14 @@ from google.protobuf import timestamp_pb2 -_shared_modules = [empty_pb2, timestamp_pb2] - -_local_modules = [trace_pb2] +_shared_modules = [ + empty_pb2, + timestamp_pb2, +] + +_local_modules = [ + trace_pb2, +] names = [] diff --git a/trace/google/cloud/trace_v2/__init__.py b/trace/google/cloud/trace_v2/__init__.py index c285c7cc0500..0678603445f7 100644 --- a/trace/google/cloud/trace_v2/__init__.py +++ b/trace/google/cloud/trace_v2/__init__.py @@ -27,4 +27,8 @@ class TraceServiceClient(trace_service_client.TraceServiceClient): enums = enums -__all__ = ("enums", "types", "TraceServiceClient") +__all__ = ( + "enums", + "types", + "TraceServiceClient", +) diff --git 
a/trace/google/cloud/trace_v2/gapic/trace_service_client.py b/trace/google/cloud/trace_v2/gapic/trace_service_client.py index ef489dc0d785..de2f3c088d2d 100644 --- a/trace/google/cloud/trace_v2/gapic/trace_service_client.py +++ b/trace/google/cloud/trace_v2/gapic/trace_service_client.py @@ -41,7 +41,7 @@ from google.rpc import status_pb2 -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-trace").version +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-trace",).version class TraceServiceClient(object): @@ -84,7 +84,7 @@ def from_service_account_file(cls, filename, *args, **kwargs): def project_path(cls, project): """Return a fully-qualified project string.""" return google.api_core.path_template.expand( - "projects/{project}", project=project + "projects/{project}", project=project, ) @classmethod @@ -184,12 +184,12 @@ def __init__( self.transport = transport else: self.transport = trace_service_grpc_transport.TraceServiceGrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials + address=api_endpoint, channel=channel, credentials=credentials, ) if client_info is None: client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION + gapic_version=_GAPIC_LIBRARY_VERSION, ) else: client_info.gapic_version = _GAPIC_LIBRARY_VERSION @@ -200,7 +200,7 @@ def __init__( # (Ordinarily, these are the defaults specified in the `*_config.py` # file next to this one.) self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME] + client_config["interfaces"][self._INTERFACE_NAME], ) # Save a dictionary of cached API call functions. 
@@ -269,7 +269,7 @@ def batch_write_spans( client_info=self._client_info, ) - request = tracing_pb2.BatchWriteSpansRequest(name=name, spans=spans) + request = tracing_pb2.BatchWriteSpansRequest(name=name, spans=spans,) if metadata is None: metadata = [] metadata = list(metadata) diff --git a/trace/google/cloud/trace_v2/gapic/transports/trace_service_grpc_transport.py b/trace/google/cloud/trace_v2/gapic/transports/trace_service_grpc_transport.py index 62e908229d96..efc84d0e6418 100644 --- a/trace/google/cloud/trace_v2/gapic/transports/trace_service_grpc_transport.py +++ b/trace/google/cloud/trace_v2/gapic/transports/trace_service_grpc_transport.py @@ -56,7 +56,7 @@ def __init__( # exception (channels come with credentials baked in already). if channel is not None and credentials is not None: raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive." + "The `channel` and `credentials` arguments are mutually " "exclusive.", ) # Create the channel. @@ -74,7 +74,9 @@ def __init__( # gRPC uses objects called "stubs" that are bound to the # channel and provide a basic method for each RPC. 
- self._stubs = {"trace_service_stub": tracing_pb2_grpc.TraceServiceStub(channel)} + self._stubs = { + "trace_service_stub": tracing_pb2_grpc.TraceServiceStub(channel), + } @classmethod def create_channel( diff --git a/trace/google/cloud/trace_v2/proto/trace_pb2.py b/trace/google/cloud/trace_v2/proto/trace_pb2.py index 5298cb85dfe1..22825ebdb550 100644 --- a/trace/google/cloud/trace_v2/proto/trace_pb2.py +++ b/trace/google/cloud/trace_v2/proto/trace_pb2.py @@ -204,7 +204,7 @@ ), ], extensions=[], - nested_types=[_SPAN_ATTRIBUTES_ATTRIBUTEMAPENTRY], + nested_types=[_SPAN_ATTRIBUTES_ATTRIBUTEMAPENTRY,], enum_types=[], serialized_options=None, is_extendable=False, @@ -353,7 +353,7 @@ ], extensions=[], nested_types=[], - enum_types=[_SPAN_TIMEEVENT_MESSAGEEVENT_TYPE], + enum_types=[_SPAN_TIMEEVENT_MESSAGEEVENT_TYPE,], serialized_options=None, is_extendable=False, syntax="proto3", @@ -426,7 +426,7 @@ ), ], extensions=[], - nested_types=[_SPAN_TIMEEVENT_ANNOTATION, _SPAN_TIMEEVENT_MESSAGEEVENT], + nested_types=[_SPAN_TIMEEVENT_ANNOTATION, _SPAN_TIMEEVENT_MESSAGEEVENT,], enum_types=[], serialized_options=None, is_extendable=False, @@ -439,7 +439,7 @@ index=0, containing_type=None, fields=[], - ) + ), ], serialized_start=1086, serialized_end=1693, @@ -601,7 +601,7 @@ ], extensions=[], nested_types=[], - enum_types=[_SPAN_LINK_TYPE], + enum_types=[_SPAN_LINK_TYPE,], serialized_options=None, is_extendable=False, syntax="proto3", @@ -1004,7 +1004,7 @@ index=0, containing_type=None, fields=[], - ) + ), ], serialized_start=2195, serialized_end=2337, @@ -1258,7 +1258,7 @@ ), ], extensions=[], - nested_types=[_STACKTRACE_STACKFRAME, _STACKTRACE_STACKFRAMES], + nested_types=[_STACKTRACE_STACKFRAME, _STACKTRACE_STACKFRAMES,], enum_types=[], serialized_options=None, is_extendable=False, diff --git a/trace/google/cloud/trace_v2/types.py b/trace/google/cloud/trace_v2/types.py index f6c6af03351e..3e7a67042e25 100644 --- a/trace/google/cloud/trace_v2/types.py +++ 
b/trace/google/cloud/trace_v2/types.py @@ -29,9 +29,18 @@ from google.rpc import status_pb2 -_shared_modules = [any_pb2, empty_pb2, timestamp_pb2, wrappers_pb2, status_pb2] - -_local_modules = [trace_pb2, tracing_pb2] +_shared_modules = [ + any_pb2, + empty_pb2, + timestamp_pb2, + wrappers_pb2, + status_pb2, +] + +_local_modules = [ + trace_pb2, + tracing_pb2, +] names = [] diff --git a/trace/setup.py b/trace/setup.py index d9c84832df9b..015b48f9973f 100644 --- a/trace/setup.py +++ b/trace/setup.py @@ -22,15 +22,15 @@ name = 'google-cloud-trace' description = 'Stackdriver Trace API client library' -version = '0.22.1' +version = '0.23.0' # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' # 'Development Status :: 5 - Production/Stable' release_status = 'Development Status :: 3 - Alpha' dependencies = [ - 'google-api-core[grpc] >= 1.14.0, < 2.0.0dev', - "google-cloud-core >= 1.0.0, < 2.0dev", + "google-api-core[grpc] >= 1.14.0, < 2.0.0dev", + "google-cloud-core >= 1.0.3, < 2.0dev", ] extras = { } diff --git a/trace/synth.metadata b/trace/synth.metadata index c3bfe0244394..925e26975512 100644 --- a/trace/synth.metadata +++ b/trace/synth.metadata @@ -1,26 +1,26 @@ { - "updateTime": "2019-08-06T12:44:36.453837Z", + "updateTime": "2019-10-29T12:42:54.805714Z", "sources": [ { "generator": { "name": "artman", - "version": "0.32.1", - "dockerImage": "googleapis/artman@sha256:a684d40ba9a4e15946f5f2ca6b4bd9fe301192f522e9de4fff622118775f309b" + "version": "0.40.3", + "dockerImage": "googleapis/artman@sha256:c805f50525f5f557886c94ab76f56eaa09cb1da58c3ee95111fd34259376621a" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "e699b0cba64ffddfae39633417180f1f65875896", - "internalRef": "261759677" + "sha": "532773acbed8d09451dafb3d403ab1823e6a6e1e", + "internalRef": "277177415" } }, { "template": { "name": "python_library", "origin": "synthtool.gcp", - "version": "2019.5.2" + "version": 
"2019.10.17" } } ], diff --git a/trace/tests/unit/v1/test__gapic_v1.py b/trace/tests/unit/v1/test__gapic_v1.py index bd21f026d056..bf9103e7abe5 100644 --- a/trace/tests/unit/v1/test__gapic_v1.py +++ b/trace/tests/unit/v1/test__gapic_v1.py @@ -227,7 +227,7 @@ def _call_fut(self, client): def test_it(self): from google.cloud.trace.v1._gapic import _TraceAPI - client = mock.Mock(spec=["_credentials", "_client_info"]) + client = mock.Mock(spec=["_credentials", "_client_info", "_client_options"]) patch_api = mock.patch( "google.cloud.trace.v1._gapic.trace_service_client.TraceServiceClient" @@ -237,7 +237,9 @@ def test_it(self): trace_api = self._call_fut(client) patched.assert_called_once_with( - credentials=client._credentials, client_info=client._client_info + credentials=client._credentials, + client_info=client._client_info, + client_options=client._client_options, ) self.assertIsInstance(trace_api, _TraceAPI) diff --git a/trace/tests/unit/v1/test_client_v1.py b/trace/tests/unit/v1/test_client_v1.py index d3e1c391592a..9f50e868caad 100644 --- a/trace/tests/unit/v1/test_client_v1.py +++ b/trace/tests/unit/v1/test_client_v1.py @@ -47,11 +47,16 @@ def test_constructor_defaults(self): def test_constructor_explicit(self): credentials = _make_credentials() client_info = mock.Mock() + client_options = mock.Mock() client = self._make_one( - project=self.project, credentials=credentials, client_info=client_info + project=self.project, + credentials=credentials, + client_info=client_info, + client_options=client_options, ) self.assertEqual(client.project, self.project) self.assertIs(client._client_info, client_info) + self.assertIs(client._client_options, client_options) def test_trace_api(self): clients = [] diff --git a/trace/tests/unit/v2/test__gapic_v2.py b/trace/tests/unit/v2/test__gapic_v2.py index e6859b5964ef..fd647240dba7 100644 --- a/trace/tests/unit/v2/test__gapic_v2.py +++ b/trace/tests/unit/v2/test__gapic_v2.py @@ -272,7 +272,7 @@ def _call_fut(self, client): def 
test_it(self): from google.cloud.trace._gapic import _TraceAPI - client = mock.Mock(spec=["_credentials", "_client_info"]) + client = mock.Mock(spec=["_credentials", "_client_info", "_client_options"]) patch_api = mock.patch( "google.cloud.trace._gapic.trace_service_client.TraceServiceClient" @@ -282,7 +282,9 @@ def test_it(self): trace_api = self._call_fut(client) patched.assert_called_once_with( - credentials=client._credentials, client_info=client._client_info + credentials=client._credentials, + client_info=client._client_info, + client_options=client._client_options, ) self.assertIsInstance(trace_api, _TraceAPI) diff --git a/trace/tests/unit/v2/test_client_v2.py b/trace/tests/unit/v2/test_client_v2.py index d5a14aeb6c92..4cd8a2a8705a 100644 --- a/trace/tests/unit/v2/test_client_v2.py +++ b/trace/tests/unit/v2/test_client_v2.py @@ -47,10 +47,15 @@ def test_constructor_defaults(self): def test_constructor_explicit(self): credentials = _make_credentials() client_info = mock.Mock() + client_options = mock.Mock() client = self._make_one( - project=self.project, credentials=credentials, client_info=client_info + project=self.project, + credentials=credentials, + client_info=client_info, + client_options=client_options, ) self.assertEqual(client.project, self.project) + self.assertIs(client._client_options, client_options) def test_trace_api(self): clients = [] diff --git a/translate/CHANGELOG.md b/translate/CHANGELOG.md index 7cdbb9408612..594d539a168c 100644 --- a/translate/CHANGELOG.md +++ b/translate/CHANGELOG.md @@ -4,6 +4,42 @@ [1]: https://pypi.org/project/google-cloud-translate/#history +## 2.0.0 + +10-23-2019 11:13 PDT + +### New Features +- Make v3 the default client. ([#9498](https://github.com/googleapis/google-cloud-python/pull/9498)) + +### Internal / Testing Changes +- Add VPC-SC system tests. 
([#9272](https://github.com/googleapis/google-cloud-python/pull/9272)) + +## 1.7.0 + +10-07-2019 14:57 PDT + +### Implementation Changes +- Update docstrings, client confg (via synth). ([#9411](https://github.com/googleapis/google-cloud-python/pull/9411)) +- Remove send / receive message size limit (via synth). ([#8974](https://github.com/googleapis/google-cloud-python/pull/8974)) + +### New Features +- Add support for V3 of the API. ([#9020](https://github.com/googleapis/google-cloud-python/pull/9020)) +- Make `parent` argument required for all client methods in v3beta1; add `labels` argument (via synth). ([#9354](https://github.com/googleapis/google-cloud-python/pull/9354)) +- Add client options to translate_v2. ([#8737](https://github.com/googleapis/google-cloud-python/pull/8737)) + +### Dependencies +- Bump minimum version for google-api-core to 1.14.0. ([#8709](https://github.com/googleapis/google-cloud-python/pull/8709)) + +### Documentation +- Fix links to reference documentation. ([#8884](https://github.com/googleapis/google-cloud-python/pull/8884)) +- Link to googleapis.dev documentation in READMEs. ([#8705](https://github.com/googleapis/google-cloud-python/pull/8705)) + +### Internal / Testing Changes +- Update `ListGlossaries` method annotation (via synth) ([#9385](https://github.com/googleapis/google-cloud-python/pull/9385)) +- Fix intersphinx reference to requests ([#9294](https://github.com/googleapis/google-cloud-python/pull/9294)) +- Remove CI for gh-pages, use googleapis.dev for api_core refs. ([#9085](https://github.com/googleapis/google-cloud-python/pull/9085)) +- Update intersphinx mapping for requests. 
([#8805](https://github.com/googleapis/google-cloud-python/pull/8805)) + ## 1.6.0 07-09-2019 13:13 PDT diff --git a/translate/README.rst b/translate/README.rst index af398ee4e3a0..1ca13ab74749 100644 --- a/translate/README.rst +++ b/translate/README.rst @@ -81,41 +81,4 @@ Windows pip install virtualenv virtualenv \Scripts\activate - \Scripts\pip.exe install google-cloud-translate - - -Example Usage -~~~~~~~~~~~~~ - -.. code-block:: python - - >>> from google.cloud import translate - >>> client = translate.Client() - >>> client.get_languages() - [ - { - 'language': 'af', - 'name': 'Afrikaans', - }, - ... - ] - >>> client.detect_language(['Me llamo', 'I am']) - [ - { - 'confidence': 0.25830904, - 'input': 'Me llamo', - 'language': 'es', - }, { - 'confidence': 0.17112699, - 'input': 'I am', - 'language': 'en', - }, - ] - >>> from google.cloud import translate - >>> client = translate.Client() - >>> client.translate('koszula') - { - 'translatedText': 'shirt', - 'detectedSourceLanguage': 'pl', - 'input': 'koszula', - } + \Scripts\pip.exe install google-cloud-translate \ No newline at end of file diff --git a/translate/docs/conf.py b/translate/docs/conf.py index 1e8b3b27ad70..15ac56f76839 100644 --- a/translate/docs/conf.py +++ b/translate/docs/conf.py @@ -339,7 +339,7 @@ "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://2.python-requests.org/en/master/", None), + "requests": ("https://requests.kennethreitz.org/en/stable/", None), "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } diff --git a/translate/docs/gapic/v3/api.rst b/translate/docs/gapic/v3/api.rst new file mode 100644 index 000000000000..e16ef6a89d51 --- /dev/null +++ b/translate/docs/gapic/v3/api.rst @@ -0,0 +1,6 @@ +Client for Cloud Translation API 
+================================ + +.. automodule:: google.cloud.translate_v3 + :members: + :inherited-members: \ No newline at end of file diff --git a/translate/docs/gapic/v3/types.rst b/translate/docs/gapic/v3/types.rst new file mode 100644 index 000000000000..fb83ab0657b6 --- /dev/null +++ b/translate/docs/gapic/v3/types.rst @@ -0,0 +1,5 @@ +Types for Cloud Translation API Client +====================================== + +.. automodule:: google.cloud.translate_v3.types + :members: \ No newline at end of file diff --git a/translate/docs/index.rst b/translate/docs/index.rst index 8b370cfdae11..b5d2405e2f47 100644 --- a/translate/docs/index.rst +++ b/translate/docs/index.rst @@ -1,31 +1,23 @@ .. include:: README.rst -Detailed Usage Guide --------------------- -.. toctree:: - :maxdepth: 2 - - usage - API Reference ------------- -A new beta release, spelled ``v3beta1``, is provided to provide for preview -of upcoming features. In order to use this, you will want to import from -``google.cloud.translate_v3beta1`` in lieu of ``google.cloud.translate``. +An API and type reference is provided for ``v3``, ``v3beta1``, and ``v2``. -.. toctree:: - :maxdepth: 2 - - gapic/v3beta1/api - gapic/v3beta1/types +By default, you will get ``v3``. A beta release, spelled ``v3beta1`` is +provided for preview of upcoming features. In order to use this, you will +want to import from ``google.cloud.translate_v3beta1`` in lieu of +``google.cloud.translate``. The previous release ``v2`` is also available. +Import from ``google.cloud.translate_v2`` to use this release. -An API and type reference is provided for v2: .. toctree:: - :maxdepth: 2 - - client + :maxdepth: 2 + + v3 + v3beta1 + v2 Changelog diff --git a/translate/docs/usage.rst b/translate/docs/usage.rst index 5fc2767c06e6..c6ee312c0cac 100644 --- a/translate/docs/usage.rst +++ b/translate/docs/usage.rst @@ -1,12 +1,12 @@ -Using the Translate Client +Translation v2 Usage Guide -------------------------- To create a client: .. 
doctest:: - >>> from google.cloud import translate - >>> client = translate.Client() + >>> from google.cloud import translate_v2 + >>> client = translate_v2.Client() By default, the client targets English when doing detections and translations, but a non-default value can be used as @@ -14,21 +14,21 @@ well: .. doctest:: - >>> from google.cloud import translate - >>> client = translate.Client(target_language='es') + >>> from google.cloud import translate_v2 + >>> client = translate_v2.Client(target_language='es') -The Google Cloud Translation API has three supported methods, and they +The Google Cloud Translation v2 API has three supported methods, and they map to three methods on a client: -:meth:`~google.cloud.translate.client.Client.get_languages`, -:meth:`~google.cloud.translate.client.Client.detect_language` and -:meth:`~google.cloud.translate.client.Client.translate`. +:meth:`~google.cloud.translate_v2.client.Client.get_languages`, +:meth:`~google.cloud.translate_v2.client.Client.detect_language` and +:meth:`~google.cloud.translate_v2.client.Client.translate`. To get a list of languages supported by the Google Cloud Translation API .. doctest:: - >>> from google.cloud import translate - >>> client = translate.Client() + >>> from google.cloud import translate_v2 + >>> client = translate_v2.Client() >>> client.get_languages() [ { @@ -42,8 +42,8 @@ To detect the language that some given text is written in: .. doctest:: - >>> from google.cloud import translate - >>> client = translate.Client() + >>> from google.cloud import translate_v2 + >>> client = translate_v2.Client() >>> client.detect_language(['Me llamo', 'I am']) [ { @@ -68,8 +68,8 @@ the source language: .. doctest:: - >>> from google.cloud import translate - >>> client = translate.Client() + >>> from google.cloud import translate_v2 + >>> client = translate_v2.Client() >>> client.translate('koszula') { 'translatedText': 'shirt', @@ -81,8 +81,8 @@ If the source language is known: .. 
doctest:: - >>> from google.cloud import translate - >>> client = translate.Client() + >>> from google.cloud import translate_v2 + >>> client = translate_v2.Client() >>> client.translate('camisa', source_language='es') { 'translatedText': 'shirt', @@ -93,8 +93,8 @@ or to use a non-default target language: .. doctest:: - >>> from google.cloud import translate - >>> client = translate.Client() + >>> from google.cloud import translate_v2 + >>> client = translate_v2.Client() >>> client.translate(['Me llamo Jeff', 'My name is Jeff'], ... target_language='de') [ diff --git a/translate/docs/v2.rst b/translate/docs/v2.rst new file mode 100644 index 000000000000..0e056c8ff63b --- /dev/null +++ b/translate/docs/v2.rst @@ -0,0 +1,8 @@ +v2 +=== + +.. toctree:: + :maxdepth: 2 + + client + usage diff --git a/translate/docs/v3.rst b/translate/docs/v3.rst new file mode 100644 index 000000000000..b6b8cd17e1e6 --- /dev/null +++ b/translate/docs/v3.rst @@ -0,0 +1,8 @@ +v3 +=== + +.. toctree:: + :maxdepth: 2 + + gapic/v3/api + gapic/v3/types diff --git a/translate/docs/v3beta1.rst b/translate/docs/v3beta1.rst new file mode 100644 index 000000000000..368aed30e243 --- /dev/null +++ b/translate/docs/v3beta1.rst @@ -0,0 +1,8 @@ +v3beta1 +======= + +.. toctree:: + :maxdepth: 2 + + gapic/v3beta1/api + gapic/v3beta1/types diff --git a/translate/google/cloud/translate.py b/translate/google/cloud/translate.py index 947f8c8ae093..27d23f137628 100644 --- a/translate/google/cloud/translate.py +++ b/translate/google/cloud/translate.py @@ -1,10 +1,12 @@ -# Copyright 2017 Google LLC +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,16 +14,11 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""Google Cloud Translation API wrapper.""" - -from google.cloud.translate_v2 import __version__ -from google.cloud.translate_v2.client import Client +from __future__ import absolute_import -# These constants are essentially deprecated; strings should be used instead. -# They are imported here for backwards compatibility. -from google.cloud.translate_v2.client import BASE -from google.cloud.translate_v2.client import NMT +from google.cloud.translate_v3 import TranslationServiceClient +from google.cloud.translate_v3 import types -__all__ = ("__version__", "BASE", "Client", "NMT") +__all__ = ("types", "TranslationServiceClient") diff --git a/translate/google/cloud/translate_v3/__init__.py b/translate/google/cloud/translate_v3/__init__.py new file mode 100644 index 000000000000..f4e61721160a --- /dev/null +++ b/translate/google/cloud/translate_v3/__init__.py @@ -0,0 +1,28 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +from __future__ import absolute_import + +from google.cloud.translate_v3 import types +from google.cloud.translate_v3.gapic import translation_service_client + + +class TranslationServiceClient(translation_service_client.TranslationServiceClient): + __doc__ = translation_service_client.TranslationServiceClient.__doc__ + + +__all__ = ("types", "TranslationServiceClient") diff --git a/translate/google/cloud/translate_v3/gapic/__init__.py b/translate/google/cloud/translate_v3/gapic/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/translate/google/cloud/translate_v3/gapic/enums.py b/translate/google/cloud/translate_v3/gapic/enums.py new file mode 100644 index 000000000000..051328dab5b6 --- /dev/null +++ b/translate/google/cloud/translate_v3/gapic/enums.py @@ -0,0 +1,91 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Wrappers for protocol buffer enum types.""" + +import enum + + +class BatchTranslateMetadata(object): + class State(enum.IntEnum): + """ + State of the job. + + Attributes: + STATE_UNSPECIFIED (int): Invalid. + RUNNING (int): Request is being processed. + SUCCEEDED (int): The batch is processed, and at least one item was successfully + processed. + FAILED (int): The batch is done and no item was successfully processed. + CANCELLING (int): Request is in the process of being canceled after caller invoked + longrunning.Operations.CancelOperation on the request id. 
+ CANCELLED (int): The batch is done after the user has called the + longrunning.Operations.CancelOperation. Any records processed before the + cancel command are output as specified in the request. + """ + + STATE_UNSPECIFIED = 0 + RUNNING = 1 + SUCCEEDED = 2 + FAILED = 3 + CANCELLING = 4 + CANCELLED = 5 + + +class CreateGlossaryMetadata(object): + class State(enum.IntEnum): + """ + Enumerates the possible states that the creation request can be in. + + Attributes: + STATE_UNSPECIFIED (int): Invalid. + RUNNING (int): Request is being processed. + SUCCEEDED (int): The glossary was successfully created. + FAILED (int): Failed to create the glossary. + CANCELLING (int): Request is in the process of being canceled after caller invoked + longrunning.Operations.CancelOperation on the request id. + CANCELLED (int): The glossary creation request was successfully canceled. + """ + + STATE_UNSPECIFIED = 0 + RUNNING = 1 + SUCCEEDED = 2 + FAILED = 3 + CANCELLING = 4 + CANCELLED = 5 + + +class DeleteGlossaryMetadata(object): + class State(enum.IntEnum): + """ + Enumerates the possible states that the creation request can be in. + + Attributes: + STATE_UNSPECIFIED (int): Invalid. + RUNNING (int): Request is being processed. + SUCCEEDED (int): The glossary was successfully deleted. + FAILED (int): Failed to delete the glossary. + CANCELLING (int): Request is in the process of being canceled after caller invoked + longrunning.Operations.CancelOperation on the request id. + CANCELLED (int): The glossary deletion request was successfully canceled. 
+ """ + + STATE_UNSPECIFIED = 0 + RUNNING = 1 + SUCCEEDED = 2 + FAILED = 3 + CANCELLING = 4 + CANCELLED = 5 diff --git a/translate/google/cloud/translate_v3/gapic/translation_service_client.py b/translate/google/cloud/translate_v3/gapic/translation_service_client.py new file mode 100644 index 000000000000..c993c434b2ae --- /dev/null +++ b/translate/google/cloud/translate_v3/gapic/translation_service_client.py @@ -0,0 +1,1099 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Accesses the google.cloud.translation.v3 TranslationService API.""" + +import functools +import pkg_resources +import warnings + +from google.oauth2 import service_account +import google.api_core.client_options +import google.api_core.gapic_v1.client_info +import google.api_core.gapic_v1.config +import google.api_core.gapic_v1.method +import google.api_core.gapic_v1.routing_header +import google.api_core.grpc_helpers +import google.api_core.operation +import google.api_core.operations_v1 +import google.api_core.page_iterator +import google.api_core.path_template +import google.api_core.protobuf_helpers +import grpc + +from google.cloud.translate_v3.gapic import translation_service_client_config +from google.cloud.translate_v3.gapic.transports import ( + translation_service_grpc_transport, +) +from google.cloud.translate_v3.proto import translation_service_pb2 +from google.cloud.translate_v3.proto import translation_service_pb2_grpc +from google.longrunning import operations_pb2 + + +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( + "google-cloud-translate" +).version + + +class TranslationServiceClient(object): + """Provides natural language translation operations.""" + + SERVICE_ADDRESS = "translate.googleapis.com:443" + """The default address of the service.""" + + # The name of the interface for this client. This is the key used to + # find the method configuration in the client_config dictionary. + _INTERFACE_NAME = "google.cloud.translation.v3.TranslationService" + + @classmethod + def from_service_account_file(cls, filename, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TranslationServiceClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @classmethod + def glossary_path(cls, project, location, glossary): + """Return a fully-qualified glossary string.""" + return google.api_core.path_template.expand( + "projects/{project}/locations/{location}/glossaries/{glossary}", + project=project, + location=location, + glossary=glossary, + ) + + @classmethod + def location_path(cls, project, location): + """Return a fully-qualified location string.""" + return google.api_core.path_template.expand( + "projects/{project}/locations/{location}", + project=project, + location=location, + ) + + def __init__( + self, + transport=None, + channel=None, + credentials=None, + client_config=None, + client_info=None, + client_options=None, + ): + """Constructor. + + Args: + transport (Union[~.TranslationServiceGrpcTransport, + Callable[[~.Credentials, type], ~.TranslationServiceGrpcTransport]): A transport + instance, responsible for actually making the API calls. + The default transport uses the gRPC protocol. + This argument may also be a callable which returns a + transport instance. Callables will be sent the credentials + as the first argument and the default transport class as + the second argument. + channel (grpc.Channel): DEPRECATED. A ``Channel`` instance + through which to make calls. This argument is mutually exclusive + with ``credentials``; providing both will raise an exception. + credentials (google.auth.credentials.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is mutually exclusive with providing a + transport instance to ``transport``; doing so will raise + an exception. 
+ client_config (dict): DEPRECATED. A dictionary of call options for + each method. If not specified, the default configuration is used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + client_options (Union[dict, google.api_core.client_options.ClientOptions]): + Client options used to set user options on the client. API Endpoint + should be set through client_options. + """ + # Raise deprecation warnings for things we want to go away. + if client_config is not None: + warnings.warn( + "The `client_config` argument is deprecated.", + PendingDeprecationWarning, + stacklevel=2, + ) + else: + client_config = translation_service_client_config.config + + if channel: + warnings.warn( + "The `channel` argument is deprecated; use " "`transport` instead.", + PendingDeprecationWarning, + stacklevel=2, + ) + + api_endpoint = self.SERVICE_ADDRESS + if client_options: + if type(client_options) == dict: + client_options = google.api_core.client_options.from_dict( + client_options + ) + if client_options.api_endpoint: + api_endpoint = client_options.api_endpoint + + # Instantiate the transport. + # The transport is responsible for handling serialization and + # deserialization and actually sending data to the service. + if transport: + if callable(transport): + self.transport = transport( + credentials=credentials, + default_class=translation_service_grpc_transport.TranslationServiceGrpcTransport, + address=api_endpoint, + ) + else: + if credentials: + raise ValueError( + "Received both a transport instance and " + "credentials; these are mutually exclusive." 
+ ) + self.transport = transport + else: + self.transport = translation_service_grpc_transport.TranslationServiceGrpcTransport( + address=api_endpoint, channel=channel, credentials=credentials + ) + + if client_info is None: + client_info = google.api_core.gapic_v1.client_info.ClientInfo( + gapic_version=_GAPIC_LIBRARY_VERSION + ) + else: + client_info.gapic_version = _GAPIC_LIBRARY_VERSION + self._client_info = client_info + + # Parse out the default settings for retry and timeout for each RPC + # from the client configuration. + # (Ordinarily, these are the defaults specified in the `*_config.py` + # file next to this one.) + self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( + client_config["interfaces"][self._INTERFACE_NAME] + ) + + # Save a dictionary of cached API call functions. + # These are the actual callables which invoke the proper + # transport methods, wrapped with `wrap_method` to add retry, + # timeout, and the like. + self._inner_api_calls = {} + + # Service calls + def translate_text( + self, + contents, + target_language_code, + parent, + mime_type=None, + source_language_code=None, + model=None, + glossary_config=None, + labels=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Translates input text and returns translated text. + + Example: + >>> from google.cloud import translate_v3 + >>> + >>> client = translate_v3.TranslationServiceClient() + >>> + >>> # TODO: Initialize `contents`: + >>> contents = [] + >>> + >>> # TODO: Initialize `target_language_code`: + >>> target_language_code = '' + >>> parent = client.location_path('[PROJECT]', '[LOCATION]') + >>> + >>> response = client.translate_text(contents, target_language_code, parent) + + Args: + contents (list[str]): Required. The content of the input in string format. + We recommend the total content be less than 30k codepoints. + Use BatchTranslateText for larger text. 
+ target_language_code (str): Required. The BCP-47 language code to use for translation of the input + text, set to one of the language codes listed in Language Support. + parent (str): Required. Project or location to make a call. Must refer to a caller's + project. + + Format: ``projects/{project-number-or-id}`` or + ``projects/{project-number-or-id}/locations/{location-id}``. + + For global calls, use + ``projects/{project-number-or-id}/locations/global`` or + ``projects/{project-number-or-id}``. + + Non-global location is required for requests using AutoML models or + custom glossaries. + + Models and glossaries must be within the same region (have same + location-id), otherwise an INVALID\_ARGUMENT (400) error is returned. + mime_type (str): Optional. The format of the source text, for example, "text/html", + "text/plain". If left blank, the MIME type defaults to "text/html". + source_language_code (str): Optional. The BCP-47 language code of the input text if + known, for example, "en-US" or "sr-Latn". Supported language codes are + listed in Language Support. If the source language isn't specified, the API + attempts to identify the source language automatically and returns the + source language within the response. + model (str): Optional. The ``model`` type requested for this translation. + + The format depends on model type: + + - AutoML Translation models: + ``projects/{project-number-or-id}/locations/{location-id}/models/{model-id}`` + + - General (built-in) models: + ``projects/{project-number-or-id}/locations/{location-id}/models/general/nmt``, + ``projects/{project-number-or-id}/locations/{location-id}/models/general/base`` + + For global (non-regionalized) requests, use ``location-id`` ``global``. + For example, + ``projects/{project-number-or-id}/locations/global/models/general/nmt``. + + If missing, the system decides which google base model to use. 
+ glossary_config (Union[dict, ~google.cloud.translate_v3.types.TranslateTextGlossaryConfig]): Optional. Glossary to be applied. The glossary must be within the same + region (have the same location-id) as the model, otherwise an + INVALID\_ARGUMENT (400) error is returned. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.translate_v3.types.TranslateTextGlossaryConfig` + labels (dict[str -> str]): Optional. The labels with user-defined metadata for the request. + + Label keys and values can be no longer than 63 characters + (Unicode codepoints), can only contain lowercase letters, numeric + characters, underscores and dashes. International characters are allowed. + Label values are optional. Label keys must start with a letter. + + See https://cloud.google.com/translate/docs/labels for more information. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.translate_v3.types.TranslateTextResponse` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
+ if "translate_text" not in self._inner_api_calls: + self._inner_api_calls[ + "translate_text" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.translate_text, + default_retry=self._method_configs["TranslateText"].retry, + default_timeout=self._method_configs["TranslateText"].timeout, + client_info=self._client_info, + ) + + request = translation_service_pb2.TranslateTextRequest( + contents=contents, + target_language_code=target_language_code, + parent=parent, + mime_type=mime_type, + source_language_code=source_language_code, + model=model, + glossary_config=glossary_config, + labels=labels, + ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("parent", parent)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + return self._inner_api_calls["translate_text"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def detect_language( + self, + parent, + model=None, + content=None, + mime_type=None, + labels=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Detects the language of text within a request. + + Example: + >>> from google.cloud import translate_v3 + >>> + >>> client = translate_v3.TranslationServiceClient() + >>> + >>> parent = client.location_path('[PROJECT]', '[LOCATION]') + >>> + >>> response = client.detect_language(parent) + + Args: + parent (str): Required. Project or location to make a call. Must refer to a caller's + project. + + Format: ``projects/{project-number-or-id}/locations/{location-id}`` or + ``projects/{project-number-or-id}``. + + For global calls, use + ``projects/{project-number-or-id}/locations/global`` or + ``projects/{project-number-or-id}``. + + Only models within the same region (has same location-id) can be used. 
+ Otherwise an INVALID\_ARGUMENT (400) error is returned. + model (str): Optional. The language detection model to be used. + + Format: + ``projects/{project-number-or-id}/locations/{location-id}/models/language-detection/{model-id}`` + + Only one language detection model is currently supported: + ``projects/{project-number-or-id}/locations/{location-id}/models/language-detection/default``. + + If not specified, the default model is used. + content (str): The content of the input stored as a string. + mime_type (str): Optional. The format of the source text, for example, "text/html", + "text/plain". If left blank, the MIME type defaults to "text/html". + labels (dict[str -> str]): Optional. The labels with user-defined metadata for the request. + + Label keys and values can be no longer than 63 characters + (Unicode codepoints), can only contain lowercase letters, numeric + characters, underscores and dashes. International characters are allowed. + Label values are optional. Label keys must start with a letter. + + See https://cloud.google.com/translate/docs/labels for more information. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.translate_v3.types.DetectLanguageResponse` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
+ if "detect_language" not in self._inner_api_calls: + self._inner_api_calls[ + "detect_language" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.detect_language, + default_retry=self._method_configs["DetectLanguage"].retry, + default_timeout=self._method_configs["DetectLanguage"].timeout, + client_info=self._client_info, + ) + + # Sanity check: We have some fields which are mutually exclusive; + # raise ValueError if more than one is sent. + google.api_core.protobuf_helpers.check_oneof(content=content) + + request = translation_service_pb2.DetectLanguageRequest( + parent=parent, + model=model, + content=content, + mime_type=mime_type, + labels=labels, + ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("parent", parent)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + return self._inner_api_calls["detect_language"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def get_supported_languages( + self, + parent, + display_language_code=None, + model=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Returns a list of supported languages for translation. + + Example: + >>> from google.cloud import translate_v3 + >>> + >>> client = translate_v3.TranslationServiceClient() + >>> + >>> parent = client.location_path('[PROJECT]', '[LOCATION]') + >>> + >>> response = client.get_supported_languages(parent) + + Args: + parent (str): Required. Project or location to make a call. Must refer to a caller's + project. + + Format: ``projects/{project-number-or-id}`` or + ``projects/{project-number-or-id}/locations/{location-id}``. + + For global calls, use + ``projects/{project-number-or-id}/locations/global`` or + ``projects/{project-number-or-id}``. 
+ + Non-global location is required for AutoML models. + + Only models within the same region (have same location-id) can be used, + otherwise an INVALID\_ARGUMENT (400) error is returned. + display_language_code (str): Optional. The language to use to return localized, human readable names + of supported languages. If missing, then display names are not returned + in a response. + model (str): Optional. Get supported languages of this model. + + The format depends on model type: + + - AutoML Translation models: + ``projects/{project-number-or-id}/locations/{location-id}/models/{model-id}`` + + - General (built-in) models: + ``projects/{project-number-or-id}/locations/{location-id}/models/general/nmt``, + ``projects/{project-number-or-id}/locations/{location-id}/models/general/base`` + + Returns languages supported by the specified model. If missing, we get + supported languages of Google general base (PBMT) model. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.translate_v3.types.SupportedLanguages` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
+ if "get_supported_languages" not in self._inner_api_calls: + self._inner_api_calls[ + "get_supported_languages" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.get_supported_languages, + default_retry=self._method_configs["GetSupportedLanguages"].retry, + default_timeout=self._method_configs["GetSupportedLanguages"].timeout, + client_info=self._client_info, + ) + + request = translation_service_pb2.GetSupportedLanguagesRequest( + parent=parent, display_language_code=display_language_code, model=model + ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("parent", parent)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + return self._inner_api_calls["get_supported_languages"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def batch_translate_text( + self, + parent, + source_language_code, + target_language_codes, + input_configs, + output_config, + models=None, + glossaries=None, + labels=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Translates a large volume of text in asynchronous batch mode. + This function provides real-time output as the inputs are being processed. + If caller cancels a request, the partial results (for an input file, it's + all or nothing) may still be available on the specified output location. + + This call returns immediately and you can + use google.longrunning.Operation.name to poll the status of the call. 
+ + Example: + >>> from google.cloud import translate_v3 + >>> + >>> client = translate_v3.TranslationServiceClient() + >>> + >>> parent = client.location_path('[PROJECT]', '[LOCATION]') + >>> + >>> # TODO: Initialize `source_language_code`: + >>> source_language_code = '' + >>> + >>> # TODO: Initialize `target_language_codes`: + >>> target_language_codes = [] + >>> + >>> # TODO: Initialize `input_configs`: + >>> input_configs = [] + >>> + >>> # TODO: Initialize `output_config`: + >>> output_config = {} + >>> + >>> response = client.batch_translate_text(parent, source_language_code, target_language_codes, input_configs, output_config) + >>> + >>> def callback(operation_future): + ... # Handle result. + ... result = operation_future.result() + >>> + >>> response.add_done_callback(callback) + >>> + >>> # Handle metadata. + >>> metadata = response.metadata() + + Args: + parent (str): Required. Location to make a call. Must refer to a caller's project. + + Format: ``projects/{project-number-or-id}/locations/{location-id}``. + + The ``global`` location is not supported for batch translation. + + Only AutoML Translation models or glossaries within the same region + (have the same location-id) can be used, otherwise an INVALID\_ARGUMENT + (400) error is returned. + source_language_code (str): Required. Source language code. + target_language_codes (list[str]): Required. Specify up to 10 language codes here. + input_configs (list[Union[dict, ~google.cloud.translate_v3.types.InputConfig]]): Required. Input configurations. + The total number of files matched should be <= 1000. + The total content size should be <= 100M Unicode codepoints. + The files must use UTF-8 encoding. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.translate_v3.types.InputConfig` + output_config (Union[dict, ~google.cloud.translate_v3.types.OutputConfig]): Required. Output configuration. 
+ If 2 input configs match to the same file (that is, same input path), + we don't generate output for duplicate inputs. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.translate_v3.types.OutputConfig` + models (dict[str -> str]): Optional. The models to use for translation. Map's key is target + language code. Map's value is model name. Value can be a built-in + general model, or an AutoML Translation model. + + The value format depends on model type: + + - AutoML Translation models: + ``projects/{project-number-or-id}/locations/{location-id}/models/{model-id}`` + + - General (built-in) models: + ``projects/{project-number-or-id}/locations/{location-id}/models/general/nmt``, + ``projects/{project-number-or-id}/locations/{location-id}/models/general/base`` + + If the map is empty or a specific model is not requested for a language + pair, then default google model (nmt) is used. + glossaries (dict[str -> Union[dict, ~google.cloud.translate_v3.types.TranslateTextGlossaryConfig]]): Optional. Glossaries to be applied for translation. + It's keyed by target language code. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.translate_v3.types.TranslateTextGlossaryConfig` + labels (dict[str -> str]): Optional. The labels with user-defined metadata for the request. + + Label keys and values can be no longer than 63 characters + (Unicode codepoints), can only contain lowercase letters, numeric + characters, underscores and dashes. International characters are allowed. + Label values are optional. Label keys must start with a letter. + + See https://cloud.google.com/translate/docs/labels for more information. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. 
+ timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.translate_v3.types._OperationFuture` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "batch_translate_text" not in self._inner_api_calls: + self._inner_api_calls[ + "batch_translate_text" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.batch_translate_text, + default_retry=self._method_configs["BatchTranslateText"].retry, + default_timeout=self._method_configs["BatchTranslateText"].timeout, + client_info=self._client_info, + ) + + request = translation_service_pb2.BatchTranslateTextRequest( + parent=parent, + source_language_code=source_language_code, + target_language_codes=target_language_codes, + input_configs=input_configs, + output_config=output_config, + models=models, + glossaries=glossaries, + labels=labels, + ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("parent", parent)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + operation = self._inner_api_calls["batch_translate_text"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + return google.api_core.operation.from_gapic( + operation, + self.transport._operations_client, + translation_service_pb2.BatchTranslateResponse, + 
metadata_type=translation_service_pb2.BatchTranslateMetadata, + ) + + def create_glossary( + self, + parent, + glossary, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Creates a glossary and returns the long-running operation. Returns + NOT\_FOUND, if the project doesn't exist. + + Example: + >>> from google.cloud import translate_v3 + >>> + >>> client = translate_v3.TranslationServiceClient() + >>> + >>> parent = client.location_path('[PROJECT]', '[LOCATION]') + >>> + >>> # TODO: Initialize `glossary`: + >>> glossary = {} + >>> + >>> response = client.create_glossary(parent, glossary) + >>> + >>> def callback(operation_future): + ... # Handle result. + ... result = operation_future.result() + >>> + >>> response.add_done_callback(callback) + >>> + >>> # Handle metadata. + >>> metadata = response.metadata() + + Args: + parent (str): Required. The project name. + glossary (Union[dict, ~google.cloud.translate_v3.types.Glossary]): Required. The glossary to create. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.translate_v3.types.Glossary` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.translate_v3.types._OperationFuture` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. 
+ ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "create_glossary" not in self._inner_api_calls: + self._inner_api_calls[ + "create_glossary" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.create_glossary, + default_retry=self._method_configs["CreateGlossary"].retry, + default_timeout=self._method_configs["CreateGlossary"].timeout, + client_info=self._client_info, + ) + + request = translation_service_pb2.CreateGlossaryRequest( + parent=parent, glossary=glossary + ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("parent", parent)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + operation = self._inner_api_calls["create_glossary"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + return google.api_core.operation.from_gapic( + operation, + self.transport._operations_client, + translation_service_pb2.Glossary, + metadata_type=translation_service_pb2.CreateGlossaryMetadata, + ) + + def list_glossaries( + self, + parent, + page_size=None, + filter_=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Lists glossaries in a project. Returns NOT\_FOUND, if the project + doesn't exist. + + Example: + >>> from google.cloud import translate_v3 + >>> + >>> client = translate_v3.TranslationServiceClient() + >>> + >>> parent = client.location_path('[PROJECT]', '[LOCATION]') + >>> + >>> # Iterate over all results + >>> for element in client.list_glossaries(parent): + ... # process element + ... pass + >>> + >>> + >>> # Alternatively: + >>> + >>> # Iterate over results one page at a time + >>> for page in client.list_glossaries(parent).pages: + ... for element in page: + ... # process element + ... 
pass + + Args: + parent (str): Required. The name of the project from which to list all of the glossaries. + page_size (int): The maximum number of resources contained in the + underlying API response. If page streaming is performed per- + resource, this parameter does not affect the return value. If page + streaming is performed per-page, this determines the maximum number + of resources in a page. + filter_ (str): Optional. Filter specifying constraints of a list operation. + Filtering is not supported yet, and the parameter currently has no effect. + If missing, no filtering is performed. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.api_core.page_iterator.PageIterator` instance. + An iterable of :class:`~google.cloud.translate_v3.types.Glossary` instances. + You can also iterate over the pages of the response + using its `pages` property. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
+ if "list_glossaries" not in self._inner_api_calls: + self._inner_api_calls[ + "list_glossaries" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.list_glossaries, + default_retry=self._method_configs["ListGlossaries"].retry, + default_timeout=self._method_configs["ListGlossaries"].timeout, + client_info=self._client_info, + ) + + request = translation_service_pb2.ListGlossariesRequest( + parent=parent, page_size=page_size, filter=filter_ + ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("parent", parent)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + iterator = google.api_core.page_iterator.GRPCIterator( + client=None, + method=functools.partial( + self._inner_api_calls["list_glossaries"], + retry=retry, + timeout=timeout, + metadata=metadata, + ), + request=request, + items_field="glossaries", + request_token_field="page_token", + response_token_field="next_page_token", + ) + return iterator + + def get_glossary( + self, + name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Gets a glossary. Returns NOT\_FOUND, if the glossary doesn't exist. + + Example: + >>> from google.cloud import translate_v3 + >>> + >>> client = translate_v3.TranslationServiceClient() + >>> + >>> name = client.glossary_path('[PROJECT]', '[LOCATION]', '[GLOSSARY]') + >>> + >>> response = client.get_glossary(name) + + Args: + name (str): Required. The name of the glossary to retrieve. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. 
Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.translate_v3.types.Glossary` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "get_glossary" not in self._inner_api_calls: + self._inner_api_calls[ + "get_glossary" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.get_glossary, + default_retry=self._method_configs["GetGlossary"].retry, + default_timeout=self._method_configs["GetGlossary"].timeout, + client_info=self._client_info, + ) + + request = translation_service_pb2.GetGlossaryRequest(name=name) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("name", name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + return self._inner_api_calls["get_glossary"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def delete_glossary( + self, + name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Deletes a glossary, or cancels glossary construction if the glossary + isn't created yet. Returns NOT\_FOUND, if the glossary doesn't exist. 
+ + Example: + >>> from google.cloud import translate_v3 + >>> + >>> client = translate_v3.TranslationServiceClient() + >>> + >>> name = client.glossary_path('[PROJECT]', '[LOCATION]', '[GLOSSARY]') + >>> + >>> response = client.delete_glossary(name) + >>> + >>> def callback(operation_future): + ... # Handle result. + ... result = operation_future.result() + >>> + >>> response.add_done_callback(callback) + >>> + >>> # Handle metadata. + >>> metadata = response.metadata() + + Args: + name (str): Required. The name of the glossary to delete. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.translate_v3.types._OperationFuture` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
+ if "delete_glossary" not in self._inner_api_calls: + self._inner_api_calls[ + "delete_glossary" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.delete_glossary, + default_retry=self._method_configs["DeleteGlossary"].retry, + default_timeout=self._method_configs["DeleteGlossary"].timeout, + client_info=self._client_info, + ) + + request = translation_service_pb2.DeleteGlossaryRequest(name=name) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("name", name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + operation = self._inner_api_calls["delete_glossary"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + return google.api_core.operation.from_gapic( + operation, + self.transport._operations_client, + translation_service_pb2.DeleteGlossaryResponse, + metadata_type=translation_service_pb2.DeleteGlossaryMetadata, + ) diff --git a/translate/google/cloud/translate_v3/gapic/translation_service_client_config.py b/translate/google/cloud/translate_v3/gapic/translation_service_client_config.py new file mode 100644 index 000000000000..0e611ae41d1a --- /dev/null +++ b/translate/google/cloud/translate_v3/gapic/translation_service_client_config.py @@ -0,0 +1,63 @@ +config = { + "interfaces": { + "google.cloud.translation.v3.TranslationService": { + "retry_codes": { + "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], + "non_idempotent": [], + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 20000, + "rpc_timeout_multiplier": 1.0, + "max_rpc_timeout_millis": 20000, + "total_timeout_millis": 600000, + } + }, + "methods": { + "TranslateText": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + 
}, + "DetectLanguage": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + }, + "GetSupportedLanguages": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, + "BatchTranslateText": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + }, + "CreateGlossary": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + }, + "ListGlossaries": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, + "GetGlossary": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, + "DeleteGlossary": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + }, + }, + } + } +} diff --git a/translate/google/cloud/translate_v3/gapic/transports/__init__.py b/translate/google/cloud/translate_v3/gapic/transports/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/translate/google/cloud/translate_v3/gapic/transports/translation_service_grpc_transport.py b/translate/google/cloud/translate_v3/gapic/transports/translation_service_grpc_transport.py new file mode 100644 index 000000000000..ed038e376db0 --- /dev/null +++ b/translate/google/cloud/translate_v3/gapic/transports/translation_service_grpc_transport.py @@ -0,0 +1,234 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + + +import google.api_core.grpc_helpers +import google.api_core.operations_v1 + +from google.cloud.translate_v3.proto import translation_service_pb2_grpc + + +class TranslationServiceGrpcTransport(object): + """gRPC transport class providing stubs for + google.cloud.translation.v3 TranslationService API. + + The transport provides access to the raw gRPC stubs, + which can be used to take advantage of advanced + features of gRPC. + """ + + # The scopes needed to make gRPC calls to all of the methods defined + # in this service. + _OAUTH_SCOPES = ( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-translation", + ) + + def __init__( + self, channel=None, credentials=None, address="translate.googleapis.com:443" + ): + """Instantiate the transport class. + + Args: + channel (grpc.Channel): A ``Channel`` instance through + which to make calls. This argument is mutually exclusive + with ``credentials``; providing both will raise an exception. + credentials (google.auth.credentials.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If none + are specified, the client will attempt to ascertain the + credentials from the environment. + address (str): The address where the service is hosted. + """ + # If both `channel` and `credentials` are specified, raise an + # exception (channels come with credentials baked in already). + if channel is not None and credentials is not None: + raise ValueError( + "The `channel` and `credentials` arguments are mutually " "exclusive." + ) + + # Create the channel. 
+ if channel is None: + channel = self.create_channel( + address=address, + credentials=credentials, + options={ + "grpc.max_send_message_length": -1, + "grpc.max_receive_message_length": -1, + }.items(), + ) + + self._channel = channel + + # gRPC uses objects called "stubs" that are bound to the + # channel and provide a basic method for each RPC. + self._stubs = { + "translation_service_stub": translation_service_pb2_grpc.TranslationServiceStub( + channel + ) + } + + # Because this API includes a method that returns a + # long-running operation (proto: google.longrunning.Operation), + # instantiate an LRO client. + self._operations_client = google.api_core.operations_v1.OperationsClient( + channel + ) + + @classmethod + def create_channel( + cls, address="translate.googleapis.com:443", credentials=None, **kwargs + ): + """Create and return a gRPC channel object. + + Args: + address (str): The host for the channel to use. + credentials (~.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + kwargs (dict): Keyword arguments, which are passed to the + channel creation. + + Returns: + grpc.Channel: A gRPC channel object. + """ + return google.api_core.grpc_helpers.create_channel( + address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs + ) + + @property + def channel(self): + """The gRPC channel used by the transport. + + Returns: + grpc.Channel: A gRPC channel object. + """ + return self._channel + + @property + def translate_text(self): + """Return the gRPC stub for :meth:`TranslationServiceClient.translate_text`. + + Translates input text and returns translated text. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. 
+ """ + return self._stubs["translation_service_stub"].TranslateText + + @property + def detect_language(self): + """Return the gRPC stub for :meth:`TranslationServiceClient.detect_language`. + + Detects the language of text within a request. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["translation_service_stub"].DetectLanguage + + @property + def get_supported_languages(self): + """Return the gRPC stub for :meth:`TranslationServiceClient.get_supported_languages`. + + Returns a list of supported languages for translation. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["translation_service_stub"].GetSupportedLanguages + + @property + def batch_translate_text(self): + """Return the gRPC stub for :meth:`TranslationServiceClient.batch_translate_text`. + + Translates a large volume of text in asynchronous batch mode. + This function provides real-time output as the inputs are being processed. + If caller cancels a request, the partial results (for an input file, it's + all or nothing) may still be available on the specified output location. + + This call returns immediately and you can + use google.longrunning.Operation.name to poll the status of the call. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["translation_service_stub"].BatchTranslateText + + @property + def create_glossary(self): + """Return the gRPC stub for :meth:`TranslationServiceClient.create_glossary`. + + Creates a glossary and returns the long-running operation. Returns + NOT\_FOUND, if the project doesn't exist. 
+ + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["translation_service_stub"].CreateGlossary + + @property + def list_glossaries(self): + """Return the gRPC stub for :meth:`TranslationServiceClient.list_glossaries`. + + Lists glossaries in a project. Returns NOT\_FOUND, if the project + doesn't exist. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["translation_service_stub"].ListGlossaries + + @property + def get_glossary(self): + """Return the gRPC stub for :meth:`TranslationServiceClient.get_glossary`. + + Gets a glossary. Returns NOT\_FOUND, if the glossary doesn't exist. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["translation_service_stub"].GetGlossary + + @property + def delete_glossary(self): + """Return the gRPC stub for :meth:`TranslationServiceClient.delete_glossary`. + + Deletes a glossary, or cancels glossary construction if the glossary + isn't created yet. Returns NOT\_FOUND, if the glossary doesn't exist. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. 
+ """ + return self._stubs["translation_service_stub"].DeleteGlossary diff --git a/translate/google/cloud/translate_v3/proto/__init__.py b/translate/google/cloud/translate_v3/proto/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/translate/google/cloud/translate_v3/proto/translation_service.proto b/translate/google/cloud/translate_v3/proto/translation_service.proto new file mode 100644 index 000000000000..ad43831c29b0 --- /dev/null +++ b/translate/google/cloud/translate_v3/proto/translation_service.proto @@ -0,0 +1,926 @@ +// Copyright 2019 Google LLC. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +syntax = "proto3"; + +package google.cloud.translation.v3; + +import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; +import "google/longrunning/operations.proto"; +import "google/protobuf/timestamp.proto"; + +option cc_enable_arenas = true; +option csharp_namespace = "Google.Cloud.Translate.V3"; +option go_package = "google.golang.org/genproto/googleapis/cloud/translate/v3;translate"; +option java_multiple_files = true; +option java_outer_classname = "TranslationServiceProto"; +option java_package = "com.google.cloud.translate.v3"; +option php_namespace = "Google\\Cloud\\Translate\\V3"; +option ruby_package = "Google::Cloud::Translate::V3"; + +// Proto file for the Cloud Translation API (v3 GA). 
+ +// Provides natural language translation operations. +service TranslationService { + option (google.api.default_host) = "translate.googleapis.com"; + option (google.api.oauth_scopes) = + "https://www.googleapis.com/auth/cloud-platform," + "https://www.googleapis.com/auth/cloud-translation"; + + // Translates input text and returns translated text. + rpc TranslateText(TranslateTextRequest) returns (TranslateTextResponse) { + option (google.api.http) = { + post: "/v3/{parent=projects/*/locations/*}:translateText" + body: "*" + additional_bindings { + post: "/v3/{parent=projects/*}:translateText" + body: "*" + } + }; + option (google.api.method_signature) = + "parent,target_language_code,contents"; + option (google.api.method_signature) = + "parent,model,mime_type,source_language_code,target_language_code,contents"; + } + + // Detects the language of text within a request. + rpc DetectLanguage(DetectLanguageRequest) returns (DetectLanguageResponse) { + option (google.api.http) = { + post: "/v3/{parent=projects/*/locations/*}:detectLanguage" + body: "*" + additional_bindings { + post: "/v3/{parent=projects/*}:detectLanguage" + body: "*" + } + }; + option (google.api.method_signature) = "parent,model,mime_type,content"; + } + + // Returns a list of supported languages for translation. + rpc GetSupportedLanguages(GetSupportedLanguagesRequest) + returns (SupportedLanguages) { + option (google.api.http) = { + get: "/v3/{parent=projects/*/locations/*}/supportedLanguages" + additional_bindings { get: "/v3/{parent=projects/*}/supportedLanguages" } + }; + option (google.api.method_signature) = "parent,model,display_language_code"; + } + + // Translates a large volume of text in asynchronous batch mode. + // This function provides real-time output as the inputs are being processed. + // If caller cancels a request, the partial results (for an input file, it's + // all or nothing) may still be available on the specified output location. 
+ // + // This call returns immediately and you can + // use google.longrunning.Operation.name to poll the status of the call. + rpc BatchTranslateText(BatchTranslateTextRequest) + returns (google.longrunning.Operation) { + option (google.api.http) = { + post: "/v3/{parent=projects/*/locations/*}:batchTranslateText" + body: "*" + }; + option (google.longrunning.operation_info) = { + response_type: "BatchTranslateResponse" + metadata_type: "BatchTranslateMetadata" + }; + } + + // Creates a glossary and returns the long-running operation. Returns + // NOT_FOUND, if the project doesn't exist. + rpc CreateGlossary(CreateGlossaryRequest) + returns (google.longrunning.Operation) { + option (google.api.http) = { + post: "/v3/{parent=projects/*/locations/*}/glossaries" + body: "glossary" + }; + option (google.api.method_signature) = "parent,glossary"; + option (google.longrunning.operation_info) = { + response_type: "Glossary" + metadata_type: "CreateGlossaryMetadata" + }; + } + + // Lists glossaries in a project. Returns NOT_FOUND, if the project doesn't + // exist. + rpc ListGlossaries(ListGlossariesRequest) returns (ListGlossariesResponse) { + option (google.api.http) = { + get: "/v3/{parent=projects/*/locations/*}/glossaries" + }; + option (google.api.method_signature) = "parent"; + } + + // Gets a glossary. Returns NOT_FOUND, if the glossary doesn't + // exist. + rpc GetGlossary(GetGlossaryRequest) returns (Glossary) { + option (google.api.http) = { + get: "/v3/{name=projects/*/locations/*/glossaries/*}" + }; + option (google.api.method_signature) = "name"; + } + + // Deletes a glossary, or cancels glossary construction + // if the glossary isn't created yet. + // Returns NOT_FOUND, if the glossary doesn't exist. 
+ rpc DeleteGlossary(DeleteGlossaryRequest) + returns (google.longrunning.Operation) { + option (google.api.http) = { + delete: "/v3/{name=projects/*/locations/*/glossaries/*}" + }; + option (google.api.method_signature) = "name"; + option (google.longrunning.operation_info) = { + response_type: "DeleteGlossaryResponse" + metadata_type: "DeleteGlossaryMetadata" + }; + } +} + +// Configures which glossary should be used for a specific target language, +// and defines options for applying that glossary. +message TranslateTextGlossaryConfig { + // Required. Specifies the glossary used for this translation. Use + // this format: projects/*/locations/*/glossaries/* + string glossary = 1 [(google.api.field_behavior) = REQUIRED]; + + // Optional. Indicates match is case-insensitive. + // Default value is false if missing. + bool ignore_case = 2 [(google.api.field_behavior) = OPTIONAL]; +} + +// The request message for synchronous translation. +message TranslateTextRequest { + // Required. The content of the input in string format. + // We recommend the total content be less than 30k codepoints. + // Use BatchTranslateText for larger text. + repeated string contents = 1 [(google.api.field_behavior) = REQUIRED]; + + // Optional. The format of the source text, for example, "text/html", + // "text/plain". If left blank, the MIME type defaults to "text/html". + string mime_type = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The BCP-47 language code of the input text if + // known, for example, "en-US" or "sr-Latn". Supported language codes are + // listed in Language Support. If the source language isn't specified, the API + // attempts to identify the source language automatically and returns the + // source language within the response. + string source_language_code = 4 [(google.api.field_behavior) = OPTIONAL]; + + // Required. The BCP-47 language code to use for translation of the input + // text, set to one of the language codes listed in Language Support. 
+ string target_language_code = 5 [(google.api.field_behavior) = REQUIRED]; + + // Required. Project or location to make a call. Must refer to a caller's + // project. + // + // Format: `projects/{project-number-or-id}` or + // `projects/{project-number-or-id}/locations/{location-id}`. + // + // For global calls, use `projects/{project-number-or-id}/locations/global` or + // `projects/{project-number-or-id}`. + // + // Non-global location is required for requests using AutoML models or + // custom glossaries. + // + // Models and glossaries must be within the same region (have same + // location-id), otherwise an INVALID_ARGUMENT (400) error is returned. + string parent = 8 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; + + // Optional. The `model` type requested for this translation. + // + // The format depends on model type: + // + // - AutoML Translation models: + // `projects/{project-number-or-id}/locations/{location-id}/models/{model-id}` + // + // - General (built-in) models: + // `projects/{project-number-or-id}/locations/{location-id}/models/general/nmt`, + // `projects/{project-number-or-id}/locations/{location-id}/models/general/base` + // + // + // For global (non-regionalized) requests, use `location-id` `global`. + // For example, + // `projects/{project-number-or-id}/locations/global/models/general/nmt`. + // + // If missing, the system decides which google base model to use. + string model = 6 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Glossary to be applied. The glossary must be + // within the same region (have the same location-id) as the model, otherwise + // an INVALID_ARGUMENT (400) error is returned. + TranslateTextGlossaryConfig glossary_config = 7 + [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The labels with user-defined metadata for the request. 
+ // + // Label keys and values can be no longer than 63 characters + // (Unicode codepoints), can only contain lowercase letters, numeric + // characters, underscores and dashes. International characters are allowed. + // Label values are optional. Label keys must start with a letter. + // + // See https://cloud.google.com/translate/docs/labels for more information. + map labels = 10 [(google.api.field_behavior) = OPTIONAL]; +} + +message TranslateTextResponse { + // Text translation responses with no glossary applied. + // This field has the same length as + // [`contents`][google.cloud.translation.v3.TranslateTextRequest.contents]. + repeated Translation translations = 1; + + // Text translation responses if a glossary is provided in the request. + // This can be the same as + // [`translations`][google.cloud.translation.v3.TranslateTextResponse.translations] + // if no terms apply. This field has the same length as + // [`contents`][google.cloud.translation.v3.TranslateTextRequest.contents]. + repeated Translation glossary_translations = 3; +} + +// A single translation response. +message Translation { + // Text translated into the target language. + string translated_text = 1; + + // Only present when `model` is present in the request. + // `model` here is normalized to have project number. + // + // For example: + // If the `model` requested in TranslationTextRequest is + // `projects/{project-id}/locations/{location-id}/models/general/nmt` then + // `model` here would be normalized to + // `projects/{project-number}/locations/{location-id}/models/general/nmt`. + string model = 2; + + // The BCP-47 language code of source text in the initial request, detected + // automatically, if no source language was passed within the initial + // request. If the source language was passed, auto-detection of the language + // does not occur and this field is empty. + string detected_language_code = 4; + + // The `glossary_config` used for this translation. 
+ TranslateTextGlossaryConfig glossary_config = 3; +} + +// The request message for language detection. +message DetectLanguageRequest { + // Required. Project or location to make a call. Must refer to a caller's + // project. + // + // Format: `projects/{project-number-or-id}/locations/{location-id}` or + // `projects/{project-number-or-id}`. + // + // For global calls, use `projects/{project-number-or-id}/locations/global` or + // `projects/{project-number-or-id}`. + // + // Only models within the same region (has same location-id) can be used. + // Otherwise an INVALID_ARGUMENT (400) error is returned. + string parent = 5 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; + + // Optional. The language detection model to be used. + // + // Format: + // `projects/{project-number-or-id}/locations/{location-id}/models/language-detection/{model-id}` + // + // Only one language detection model is currently supported: + // `projects/{project-number-or-id}/locations/{location-id}/models/language-detection/default`. + // + // If not specified, the default model is used. + string model = 4 [(google.api.field_behavior) = OPTIONAL]; + + // Required. The source of the document from which to detect the language. + oneof source { + // The content of the input stored as a string. + string content = 1; + } + + // Optional. The format of the source text, for example, "text/html", + // "text/plain". If left blank, the MIME type defaults to "text/html". + string mime_type = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The labels with user-defined metadata for the request. + // + // Label keys and values can be no longer than 63 characters + // (Unicode codepoints), can only contain lowercase letters, numeric + // characters, underscores and dashes. International characters are allowed. + // Label values are optional. Label keys must start with a letter. 
+ // + // See https://cloud.google.com/translate/docs/labels for more information. + map labels = 6 [(google.api.field_behavior) = OPTIONAL]; +} + +// The response message for language detection. +message DetectedLanguage { + // The BCP-47 language code of source content in the request, detected + // automatically. + string language_code = 1; + + // The confidence of the detection result for this language. + float confidence = 2; +} + +// The response message for language detection. +message DetectLanguageResponse { + // A list of detected languages sorted by detection confidence in descending + // order. The most probable language first. + repeated DetectedLanguage languages = 1; +} + +// The request message for discovering supported languages. +message GetSupportedLanguagesRequest { + // Required. Project or location to make a call. Must refer to a caller's + // project. + // + // Format: `projects/{project-number-or-id}` or + // `projects/{project-number-or-id}/locations/{location-id}`. + // + // For global calls, use `projects/{project-number-or-id}/locations/global` or + // `projects/{project-number-or-id}`. + // + // Non-global location is required for AutoML models. + // + // Only models within the same region (have same location-id) can be used, + // otherwise an INVALID_ARGUMENT (400) error is returned. + string parent = 3 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; + + // Optional. The language to use to return localized, human readable names + // of supported languages. If missing, then display names are not returned + // in a response. + string display_language_code = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Get supported languages of this model. 
+ // + // The format depends on model type: + // + // - AutoML Translation models: + // `projects/{project-number-or-id}/locations/{location-id}/models/{model-id}` + // + // - General (built-in) models: + // `projects/{project-number-or-id}/locations/{location-id}/models/general/nmt`, + // `projects/{project-number-or-id}/locations/{location-id}/models/general/base` + // + // + // Returns languages supported by the specified model. + // If missing, we get supported languages of Google general base (PBMT) model. + string model = 2 [(google.api.field_behavior) = OPTIONAL]; +} + +// The response message for discovering supported languages. +message SupportedLanguages { + // A list of supported language responses. This list contains an entry + // for each language the Translation API supports. + repeated SupportedLanguage languages = 1; +} + +// A single supported language response corresponds to information related +// to one supported language. +message SupportedLanguage { + // Supported language code, generally consisting of its ISO 639-1 + // identifier, for example, 'en', 'ja'. In certain cases, BCP-47 codes + // including language and region identifiers are returned (for example, + // 'zh-TW' and 'zh-CN') + string language_code = 1; + + // Human readable name of the language localized in the display language + // specified in the request. + string display_name = 2; + + // Can be used as source language. + bool support_source = 3; + + // Can be used as target language. + bool support_target = 4; +} + +// The Google Cloud Storage location for the input content. +message GcsSource { + // Required. Source data URI. For example, `gs://my_bucket/my_object`. + string input_uri = 1; +} + +// Input configuration for BatchTranslateText request. +message InputConfig { + // Optional. Can be "text/plain" or "text/html". + // For `.tsv`, "text/html" is used if mime_type is missing. + // For `.html`, this field must be "text/html" or empty. 
+ // For `.txt`, this field must be "text/plain" or empty. + string mime_type = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Required. Specify the input. + oneof source { + // Required. Google Cloud Storage location for the source input. + // This can be a single file (for example, + // `gs://translation-test/input.tsv`) or a wildcard (for example, + // `gs://translation-test/*`). If a file extension is `.tsv`, it can + // contain either one or two columns. The first column (optional) is the id + // of the text request. If the first column is missing, we use the row + // number (0-based) from the input file as the ID in the output file. The + // second column is the actual text to be + // translated. We recommend each row be <= 10K Unicode codepoints, + // otherwise an error might be returned. + // Note that the input tsv must be RFC 4180 compliant. + // + // You could use https://github.com/Clever/csvlint to check potential + // formatting errors in your tsv file. + // csvlint --delimiter='\t' your_input_file.tsv + // + // The other supported file extensions are `.txt` or `.html`, which is + // treated as a single large chunk of text. + GcsSource gcs_source = 2; + } +} + +// The Google Cloud Storage location for the output content. +message GcsDestination { + // Required. There must be no files under 'output_uri_prefix'. + // 'output_uri_prefix' must end with "/" and start with "gs://", otherwise an + // INVALID_ARGUMENT (400) error is returned. + string output_uri_prefix = 1; +} + +// Output configuration for BatchTranslateText request. +message OutputConfig { + // Required. The destination of output. + oneof destination { + // Google Cloud Storage destination for output content. + // For every single input file (for example, gs://a/b/c.[extension]), we + // generate at most 2 * n output files. (n is the # of target_language_codes + // in the BatchTranslateTextRequest). 
+ // + // Output files (tsv) generated are compliant with RFC 4180 except that + // record delimiters are '\n' instead of '\r\n'. We don't provide any way to + // change record delimiters. + // + // While the input files are being processed, we write/update an index file + // 'index.csv' under 'output_uri_prefix' (for example, + // gs://translation-test/index.csv) The index file is generated/updated as + // new files are being translated. The format is: + // + // input_file,target_language_code,translations_file,errors_file, + // glossary_translations_file,glossary_errors_file + // + // input_file is one file we matched using gcs_source.input_uri. + // target_language_code is provided in the request. + // translations_file contains the translations. (details provided below) + // errors_file contains the errors during processing of the file. (details + // below). Both translations_file and errors_file could be empty + // strings if we have no content to output. + // glossary_translations_file and glossary_errors_file are always empty + // strings if the input_file is tsv. They could also be empty if we have no + // content to output. + // + // Once a row is present in index.csv, the input/output matching never + // changes. Callers should also expect all the content in input_file are + // processed and ready to be consumed (that is, no partial output file is + // written). + // + // The format of translations_file (for target language code 'trg') is: + // gs://translation_test/a_b_c_'trg'_translations.[extension] + // + // If the input file extension is tsv, the output has the following + // columns: + // Column 1: ID of the request provided in the input, if it's not + // provided in the input, then the input row number is used (0-based). + // Column 2: source sentence. + // Column 3: translation without applying a glossary. Empty string if there + // is an error. 
+ // Column 4 (only present if a glossary is provided in the request): + // translation after applying the glossary. Empty string if there is an + // error applying the glossary. Could be same string as column 3 if there is + // no glossary applied. + // + // If input file extension is a txt or html, the translation is directly + // written to the output file. If glossary is requested, a separate + // glossary_translations_file has format of + // gs://translation_test/a_b_c_'trg'_glossary_translations.[extension] + // + // The format of errors file (for target language code 'trg') is: + // gs://translation_test/a_b_c_'trg'_errors.[extension] + // + // If the input file extension is tsv, errors_file contains the following: + // Column 1: ID of the request provided in the input, if it's not + // provided in the input, then the input row number is used (0-based). + // Column 2: source sentence. + // Column 3: Error detail for the translation. Could be empty. + // Column 4 (only present if a glossary is provided in the request): + // Error when applying the glossary. + // + // If the input file extension is txt or html, glossary_error_file will be + // generated that contains error details. glossary_error_file has format of + // gs://translation_test/a_b_c_'trg'_glossary_errors.[extension] + GcsDestination gcs_destination = 1; + } +} + +// The batch translation request. +message BatchTranslateTextRequest { + // Required. Location to make a call. Must refer to a caller's project. + // + // Format: `projects/{project-number-or-id}/locations/{location-id}`. + // + // The `global` location is not supported for batch translation. + // + // Only AutoML Translation models or glossaries within the same region (have + // the same location-id) can be used, otherwise an INVALID_ARGUMENT (400) + // error is returned. 
+ string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; + + // Required. Source language code. + string source_language_code = 2 [(google.api.field_behavior) = REQUIRED]; + + // Required. Specify up to 10 language codes here. + repeated string target_language_codes = 3 + [(google.api.field_behavior) = REQUIRED]; + + // Optional. The models to use for translation. Map's key is target language + // code. Map's value is model name. Value can be a built-in general model, + // or an AutoML Translation model. + // + // The value format depends on model type: + // + // - AutoML Translation models: + // `projects/{project-number-or-id}/locations/{location-id}/models/{model-id}` + // + // - General (built-in) models: + // `projects/{project-number-or-id}/locations/{location-id}/models/general/nmt`, + // `projects/{project-number-or-id}/locations/{location-id}/models/general/base` + // + // + // If the map is empty or a specific model is + // not requested for a language pair, then default google model (nmt) is used. + map models = 4 [(google.api.field_behavior) = OPTIONAL]; + + // Required. Input configurations. + // The total number of files matched should be <= 1000. + // The total content size should be <= 100M Unicode codepoints. + // The files must use UTF-8 encoding. + repeated InputConfig input_configs = 5 + [(google.api.field_behavior) = REQUIRED]; + + // Required. Output configuration. + // If 2 input configs match to the same file (that is, same input path), + // we don't generate output for duplicate inputs. + OutputConfig output_config = 6 [(google.api.field_behavior) = REQUIRED]; + + // Optional. Glossaries to be applied for translation. + // It's keyed by target language code. + map glossaries = 7 + [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The labels with user-defined metadata for the request. 
+ // + // Label keys and values can be no longer than 63 characters + // (Unicode codepoints), can only contain lowercase letters, numeric + // characters, underscores and dashes. International characters are allowed. + // Label values are optional. Label keys must start with a letter. + // + // See https://cloud.google.com/translate/docs/labels for more information. + map labels = 9 [(google.api.field_behavior) = OPTIONAL]; +} + +// State metadata for the batch translation operation. +message BatchTranslateMetadata { + // State of the job. + enum State { + // Invalid. + STATE_UNSPECIFIED = 0; + + // Request is being processed. + RUNNING = 1; + + // The batch is processed, and at least one item was successfully + // processed. + SUCCEEDED = 2; + + // The batch is done and no item was successfully processed. + FAILED = 3; + + // Request is in the process of being canceled after caller invoked + // longrunning.Operations.CancelOperation on the request id. + CANCELLING = 4; + + // The batch is done after the user has called the + // longrunning.Operations.CancelOperation. Any records processed before the + // cancel command are output as specified in the request. + CANCELLED = 5; + } + + // The state of the operation. + State state = 1; + + // Number of successfully translated characters so far (Unicode codepoints). + int64 translated_characters = 2; + + // Number of characters that have failed to process so far (Unicode + // codepoints). + int64 failed_characters = 3; + + // Total number of characters (Unicode codepoints). + // This is the total number of codepoints from input files times the number of + // target languages and appears here shortly after the call is submitted. + int64 total_characters = 4; + + // Time when the operation was submitted. 
+ google.protobuf.Timestamp submit_time = 5; +} + +// Stored in the +// [google.longrunning.Operation.response][google.longrunning.Operation.response] +// field returned by BatchTranslateText if at least one sentence is translated +// successfully. +message BatchTranslateResponse { + // Total number of characters (Unicode codepoints). + int64 total_characters = 1; + + // Number of successfully translated characters (Unicode codepoints). + int64 translated_characters = 2; + + // Number of characters that have failed to process (Unicode codepoints). + int64 failed_characters = 3; + + // Time when the operation was submitted. + google.protobuf.Timestamp submit_time = 4; + + // The time when the operation is finished and + // [google.longrunning.Operation.done][google.longrunning.Operation.done] is + // set to true. + google.protobuf.Timestamp end_time = 5; +} + +// Input configuration for glossaries. +message GlossaryInputConfig { + // Required. Specify the input. + oneof source { + // Required. Google Cloud Storage location of glossary data. + // File format is determined based on the filename extension. API returns + // [google.rpc.Code.INVALID_ARGUMENT] for unsupported URI-s and file + // formats. Wildcards are not allowed. This must be a single file in one of + // the following formats: + // + // For unidirectional glossaries: + // + // - TSV/CSV (`.tsv`/`.csv`): 2 column file, tab- or comma-separated. + // The first column is source text. The second column is target text. + // The file must not contain headers. That is, the first row is data, not + // column names. + // + // - TMX (`.tmx`): TMX file with parallel data defining source/target term + // pairs. + // + // For equivalent term sets glossaries: + // + // - CSV (`.csv`): Multi-column CSV file defining equivalent glossary terms + // in multiple languages. 
The format is defined for Google Translation + // Toolkit and documented in [Use a + // glossary](https://support.google.com/translatortoolkit/answer/6306379?hl=en). + GcsSource gcs_source = 1; + } +} + +// Represents a glossary built from user provided data. +message Glossary { + option (google.api.resource) = { + type: "translate.googleapis.com/Glossary" + pattern: "projects/{project}/locations/{location}/glossaries/{glossary}" + }; + + // Used with unidirectional glossaries. + message LanguageCodePair { + // Required. The BCP-47 language code of the input text, for example, + // "en-US". Expected to be an exact match for GlossaryTerm.language_code. + string source_language_code = 1; + + // Required. The BCP-47 language code for translation output, for example, + // "zh-CN". Expected to be an exact match for GlossaryTerm.language_code. + string target_language_code = 2; + } + + // Used with equivalent term set glossaries. + message LanguageCodesSet { + // The BCP-47 language code(s) for terms defined in the glossary. + // All entries are unique. The list contains at least two entries. + // Expected to be an exact match for GlossaryTerm.language_code. + repeated string language_codes = 1; + } + + // Required. The resource name of the glossary. Glossary names have the form + // `projects/{project-number-or-id}/locations/{location-id}/glossaries/{glossary-id}`. + string name = 1; + + // Languages supported by the glossary. + oneof languages { + // Used with unidirectional glossaries. + LanguageCodePair language_pair = 3; + + // Used with equivalent term set glossaries. + LanguageCodesSet language_codes_set = 4; + } + + // Required. Provides examples to build the glossary from. + // Total glossary must not exceed 10M Unicode codepoints. + GlossaryInputConfig input_config = 5; + + // Output only. The number of entries defined in the glossary. + int32 entry_count = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. When CreateGlossary was called. 
+ google.protobuf.Timestamp submit_time = 7 + [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. When the glossary creation was finished. + google.protobuf.Timestamp end_time = 8 + [(google.api.field_behavior) = OUTPUT_ONLY]; +} + +// Request message for CreateGlossary. +message CreateGlossaryRequest { + // Required. The project name. + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; + + // Required. The glossary to create. + Glossary glossary = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// Request message for GetGlossary. +message GetGlossaryRequest { + // Required. The name of the glossary to retrieve. + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "translate.googleapis.com/Glossary" + } + ]; +} + +// Request message for DeleteGlossary. +message DeleteGlossaryRequest { + // Required. The name of the glossary to delete. + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "translate.googleapis.com/Glossary" + } + ]; +} + +// Request message for ListGlossaries. +message ListGlossariesRequest { + // Required. The name of the project from which to list all of the glossaries. + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; + + // Optional. Requested page size. The server may return fewer glossaries than + // requested. If unspecified, the server picks an appropriate default. + int32 page_size = 2 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. A token identifying a page of results the server should return. + // Typically, this is the value of [ListGlossariesResponse.next_page_token] + // returned from the previous call to `ListGlossaries` method. + // The first page is returned if `page_token`is empty or missing. 
+ string page_token = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Filter specifying constraints of a list operation. + // Filtering is not supported yet, and the parameter currently has no effect. + // If missing, no filtering is performed. + string filter = 4 [(google.api.field_behavior) = OPTIONAL]; +} + +// Response message for ListGlossaries. +message ListGlossariesResponse { + // The list of glossaries for a project. + repeated Glossary glossaries = 1; + + // A token to retrieve a page of results. Pass this value in the + // [ListGlossariesRequest.page_token] field in the subsequent call to + // `ListGlossaries` method to retrieve the next page of results. + string next_page_token = 2; +} + +// Stored in the +// [google.longrunning.Operation.metadata][google.longrunning.Operation.metadata] +// field returned by CreateGlossary. +message CreateGlossaryMetadata { + // Enumerates the possible states that the creation request can be in. + enum State { + // Invalid. + STATE_UNSPECIFIED = 0; + + // Request is being processed. + RUNNING = 1; + + // The glossary was successfully created. + SUCCEEDED = 2; + + // Failed to create the glossary. + FAILED = 3; + + // Request is in the process of being canceled after caller invoked + // longrunning.Operations.CancelOperation on the request id. + CANCELLING = 4; + + // The glossary creation request was successfully canceled. + CANCELLED = 5; + } + + // The name of the glossary that is being created. + string name = 1; + + // The current state of the glossary creation operation. + State state = 2; + + // The time when the operation was submitted to the server. + google.protobuf.Timestamp submit_time = 3; +} + +// Stored in the +// [google.longrunning.Operation.metadata][google.longrunning.Operation.metadata] +// field returned by DeleteGlossary. +message DeleteGlossaryMetadata { + // Enumerates the possible states that the creation request can be in. + enum State { + // Invalid. 
+ STATE_UNSPECIFIED = 0; + + // Request is being processed. + RUNNING = 1; + + // The glossary was successfully deleted. + SUCCEEDED = 2; + + // Failed to delete the glossary. + FAILED = 3; + + // Request is in the process of being canceled after caller invoked + // longrunning.Operations.CancelOperation on the request id. + CANCELLING = 4; + + // The glossary deletion request was successfully canceled. + CANCELLED = 5; + } + + // The name of the glossary that is being deleted. + string name = 1; + + // The current state of the glossary deletion operation. + State state = 2; + + // The time when the operation was submitted to the server. + google.protobuf.Timestamp submit_time = 3; +} + +// Stored in the +// [google.longrunning.Operation.response][google.longrunning.Operation.response] +// field returned by DeleteGlossary. +message DeleteGlossaryResponse { + // The name of the deleted glossary. + string name = 1; + + // The time when the operation was submitted to the server. + google.protobuf.Timestamp submit_time = 2; + + // The time when the glossary deletion is finished and + // [google.longrunning.Operation.done][google.longrunning.Operation.done] is + // set to true. + google.protobuf.Timestamp end_time = 3; +} diff --git a/translate/google/cloud/translate_v3/proto/translation_service_pb2.py b/translate/google/cloud/translate_v3/proto/translation_service_pb2.py new file mode 100644 index 000000000000..7bc7e15d284e --- /dev/null +++ b/translate/google/cloud/translate_v3/proto/translation_service_pb2.py @@ -0,0 +1,3902 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/cloud/translation_v3/proto/translation_service.proto + +import sys + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 +from google.longrunning import ( + operations_pb2 as google_dot_longrunning_dot_operations__pb2, +) +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name="google/cloud/translation_v3/proto/translation_service.proto", + package="google.cloud.translation.v3", + syntax="proto3", + serialized_options=_b( + "\n\035com.google.cloud.translate.v3B\027TranslationServiceProtoP\001ZBgoogle.golang.org/genproto/googleapis/cloud/translate/v3;translate\370\001\001\252\002\031Google.Cloud.Translate.V3\312\002\031Google\\Cloud\\Translate\\V3\352\002\034Google::Cloud::Translate::V3" + ), + serialized_pb=_b( + '\n;google/cloud/translation_v3/proto/translation_service.proto\x12\x1bgoogle.cloud.translation.v3\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a#google/longrunning/operations.proto\x1a\x1fgoogle/protobuf/timestamp.proto"N\n\x1bTranslateTextGlossaryConfig\x12\x15\n\x08glossary\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x18\n\x0bignore_case\x18\x02 
\x01(\x08\x42\x03\xe0\x41\x01"\xb5\x03\n\x14TranslateTextRequest\x12\x15\n\x08\x63ontents\x18\x01 \x03(\tB\x03\xe0\x41\x02\x12\x16\n\tmime_type\x18\x03 \x01(\tB\x03\xe0\x41\x01\x12!\n\x14source_language_code\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12!\n\x14target_language_code\x18\x05 \x01(\tB\x03\xe0\x41\x02\x12\x39\n\x06parent\x18\x08 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!locations.googleapis.com/Location\x12\x12\n\x05model\x18\x06 \x01(\tB\x03\xe0\x41\x01\x12V\n\x0fglossary_config\x18\x07 \x01(\x0b\x32\x38.google.cloud.translation.v3.TranslateTextGlossaryConfigB\x03\xe0\x41\x01\x12R\n\x06labels\x18\n \x03(\x0b\x32=.google.cloud.translation.v3.TranslateTextRequest.LabelsEntryB\x03\xe0\x41\x01\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xa0\x01\n\x15TranslateTextResponse\x12>\n\x0ctranslations\x18\x01 \x03(\x0b\x32(.google.cloud.translation.v3.Translation\x12G\n\x15glossary_translations\x18\x03 \x03(\x0b\x32(.google.cloud.translation.v3.Translation"\xa8\x01\n\x0bTranslation\x12\x17\n\x0ftranslated_text\x18\x01 \x01(\t\x12\r\n\x05model\x18\x02 \x01(\t\x12\x1e\n\x16\x64\x65tected_language_code\x18\x04 \x01(\t\x12Q\n\x0fglossary_config\x18\x03 \x01(\x0b\x32\x38.google.cloud.translation.v3.TranslateTextGlossaryConfig"\x9f\x02\n\x15\x44\x65tectLanguageRequest\x12\x39\n\x06parent\x18\x05 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!locations.googleapis.com/Location\x12\x12\n\x05model\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12\x11\n\x07\x63ontent\x18\x01 \x01(\tH\x00\x12\x16\n\tmime_type\x18\x03 \x01(\tB\x03\xe0\x41\x01\x12S\n\x06labels\x18\x06 \x03(\x0b\x32>.google.cloud.translation.v3.DetectLanguageRequest.LabelsEntryB\x03\xe0\x41\x01\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x08\n\x06source"=\n\x10\x44\x65tectedLanguage\x12\x15\n\rlanguage_code\x18\x01 \x01(\t\x12\x12\n\nconfidence\x18\x02 \x01(\x02"Z\n\x16\x44\x65tectLanguageResponse\x12@\n\tlanguages\x18\x01 
\x03(\x0b\x32-.google.cloud.translation.v3.DetectedLanguage"\x91\x01\n\x1cGetSupportedLanguagesRequest\x12\x39\n\x06parent\x18\x03 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!locations.googleapis.com/Location\x12"\n\x15\x64isplay_language_code\x18\x01 \x01(\tB\x03\xe0\x41\x01\x12\x12\n\x05model\x18\x02 \x01(\tB\x03\xe0\x41\x01"W\n\x12SupportedLanguages\x12\x41\n\tlanguages\x18\x01 \x03(\x0b\x32..google.cloud.translation.v3.SupportedLanguage"p\n\x11SupportedLanguage\x12\x15\n\rlanguage_code\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12\x16\n\x0esupport_source\x18\x03 \x01(\x08\x12\x16\n\x0esupport_target\x18\x04 \x01(\x08"\x1e\n\tGcsSource\x12\x11\n\tinput_uri\x18\x01 \x01(\t"m\n\x0bInputConfig\x12\x16\n\tmime_type\x18\x01 \x01(\tB\x03\xe0\x41\x01\x12<\n\ngcs_source\x18\x02 \x01(\x0b\x32&.google.cloud.translation.v3.GcsSourceH\x00\x42\x08\n\x06source"+\n\x0eGcsDestination\x12\x19\n\x11output_uri_prefix\x18\x01 \x01(\t"e\n\x0cOutputConfig\x12\x46\n\x0fgcs_destination\x18\x01 \x01(\x0b\x32+.google.cloud.translation.v3.GcsDestinationH\x00\x42\r\n\x0b\x64\x65stination"\x88\x06\n\x19\x42\x61tchTranslateTextRequest\x12\x39\n\x06parent\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!locations.googleapis.com/Location\x12!\n\x14source_language_code\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12"\n\x15target_language_codes\x18\x03 \x03(\tB\x03\xe0\x41\x02\x12W\n\x06models\x18\x04 \x03(\x0b\x32\x42.google.cloud.translation.v3.BatchTranslateTextRequest.ModelsEntryB\x03\xe0\x41\x01\x12\x44\n\rinput_configs\x18\x05 \x03(\x0b\x32(.google.cloud.translation.v3.InputConfigB\x03\xe0\x41\x02\x12\x45\n\routput_config\x18\x06 \x01(\x0b\x32).google.cloud.translation.v3.OutputConfigB\x03\xe0\x41\x02\x12_\n\nglossaries\x18\x07 \x03(\x0b\x32\x46.google.cloud.translation.v3.BatchTranslateTextRequest.GlossariesEntryB\x03\xe0\x41\x01\x12W\n\x06labels\x18\t \x03(\x0b\x32\x42.google.cloud.translation.v3.BatchTranslateTextRequest.LabelsEntryB\x03\xe0\x41\x01\x1a-\n\x0bModelsEntry\x12\x0b\n\x03key\x18\x01 
\x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1ak\n\x0fGlossariesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12G\n\x05value\x18\x02 \x01(\x0b\x32\x38.google.cloud.translation.v3.TranslateTextGlossaryConfig:\x02\x38\x01\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xce\x02\n\x16\x42\x61tchTranslateMetadata\x12H\n\x05state\x18\x01 \x01(\x0e\x32\x39.google.cloud.translation.v3.BatchTranslateMetadata.State\x12\x1d\n\x15translated_characters\x18\x02 \x01(\x03\x12\x19\n\x11\x66\x61iled_characters\x18\x03 \x01(\x03\x12\x18\n\x10total_characters\x18\x04 \x01(\x03\x12/\n\x0bsubmit_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"e\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07RUNNING\x10\x01\x12\r\n\tSUCCEEDED\x10\x02\x12\n\n\x06\x46\x41ILED\x10\x03\x12\x0e\n\nCANCELLING\x10\x04\x12\r\n\tCANCELLED\x10\x05"\xcb\x01\n\x16\x42\x61tchTranslateResponse\x12\x18\n\x10total_characters\x18\x01 \x01(\x03\x12\x1d\n\x15translated_characters\x18\x02 \x01(\x03\x12\x19\n\x11\x66\x61iled_characters\x18\x03 \x01(\x03\x12/\n\x0bsubmit_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"]\n\x13GlossaryInputConfig\x12<\n\ngcs_source\x18\x01 \x01(\x0b\x32&.google.cloud.translation.v3.GcsSourceH\x00\x42\x08\n\x06source"\xfa\x04\n\x08Glossary\x12\x0c\n\x04name\x18\x01 \x01(\t\x12O\n\rlanguage_pair\x18\x03 \x01(\x0b\x32\x36.google.cloud.translation.v3.Glossary.LanguageCodePairH\x00\x12T\n\x12language_codes_set\x18\x04 \x01(\x0b\x32\x36.google.cloud.translation.v3.Glossary.LanguageCodesSetH\x00\x12\x46\n\x0cinput_config\x18\x05 \x01(\x0b\x32\x30.google.cloud.translation.v3.GlossaryInputConfig\x12\x18\n\x0b\x65ntry_count\x18\x06 \x01(\x05\x42\x03\xe0\x41\x03\x12\x34\n\x0bsubmit_time\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x31\n\x08\x65nd_time\x18\x08 
\x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x1aN\n\x10LanguageCodePair\x12\x1c\n\x14source_language_code\x18\x01 \x01(\t\x12\x1c\n\x14target_language_code\x18\x02 \x01(\t\x1a*\n\x10LanguageCodesSet\x12\x16\n\x0elanguage_codes\x18\x01 \x03(\t:e\xea\x41\x62\n!translate.googleapis.com/Glossary\x12=projects/{project}/locations/{location}/glossaries/{glossary}B\x0b\n\tlanguages"\x90\x01\n\x15\x43reateGlossaryRequest\x12\x39\n\x06parent\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!locations.googleapis.com/Location\x12<\n\x08glossary\x18\x02 \x01(\x0b\x32%.google.cloud.translation.v3.GlossaryB\x03\xe0\x41\x02"M\n\x12GetGlossaryRequest\x12\x37\n\x04name\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!translate.googleapis.com/Glossary"P\n\x15\x44\x65leteGlossaryRequest\x12\x37\n\x04name\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!translate.googleapis.com/Glossary"\x98\x01\n\x15ListGlossariesRequest\x12\x39\n\x06parent\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!locations.googleapis.com/Location\x12\x16\n\tpage_size\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01\x12\x17\n\npage_token\x18\x03 \x01(\tB\x03\xe0\x41\x01\x12\x13\n\x06\x66ilter\x18\x04 \x01(\tB\x03\xe0\x41\x01"l\n\x16ListGlossariesResponse\x12\x39\n\nglossaries\x18\x01 \x03(\x0b\x32%.google.cloud.translation.v3.Glossary\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\x88\x02\n\x16\x43reateGlossaryMetadata\x12\x0c\n\x04name\x18\x01 \x01(\t\x12H\n\x05state\x18\x02 \x01(\x0e\x32\x39.google.cloud.translation.v3.CreateGlossaryMetadata.State\x12/\n\x0bsubmit_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"e\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07RUNNING\x10\x01\x12\r\n\tSUCCEEDED\x10\x02\x12\n\n\x06\x46\x41ILED\x10\x03\x12\x0e\n\nCANCELLING\x10\x04\x12\r\n\tCANCELLED\x10\x05"\x88\x02\n\x16\x44\x65leteGlossaryMetadata\x12\x0c\n\x04name\x18\x01 \x01(\t\x12H\n\x05state\x18\x02 \x01(\x0e\x32\x39.google.cloud.translation.v3.DeleteGlossaryMetadata.State\x12/\n\x0bsubmit_time\x18\x03 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp"e\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07RUNNING\x10\x01\x12\r\n\tSUCCEEDED\x10\x02\x12\n\n\x06\x46\x41ILED\x10\x03\x12\x0e\n\nCANCELLING\x10\x04\x12\r\n\tCANCELLED\x10\x05"\x85\x01\n\x16\x44\x65leteGlossaryResponse\x12\x0c\n\x04name\x18\x01 \x01(\t\x12/\n\x0bsubmit_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp2\x8e\x10\n\x12TranslationService\x12\xd4\x02\n\rTranslateText\x12\x31.google.cloud.translation.v3.TranslateTextRequest\x1a\x32.google.cloud.translation.v3.TranslateTextResponse"\xdb\x01\x82\xd3\xe4\x93\x02\x62"1/v3/{parent=projects/*/locations/*}:translateText:\x01*Z*"%/v3/{parent=projects/*}:translateText:\x01*\xda\x41$parent,target_language_code,contents\xda\x41Iparent,model,mime_type,source_language_code,target_language_code,contents\x12\x87\x02\n\x0e\x44\x65tectLanguage\x12\x32.google.cloud.translation.v3.DetectLanguageRequest\x1a\x33.google.cloud.translation.v3.DetectLanguageResponse"\x8b\x01\x82\xd3\xe4\x93\x02\x64"2/v3/{parent=projects/*/locations/*}:detectLanguage:\x01*Z+"&/v3/{parent=projects/*}:detectLanguage:\x01*\xda\x41\x1eparent,model,mime_type,content\x12\x97\x02\n\x15GetSupportedLanguages\x12\x39.google.cloud.translation.v3.GetSupportedLanguagesRequest\x1a/.google.cloud.translation.v3.SupportedLanguages"\x91\x01\x82\xd3\xe4\x93\x02\x66\x12\x36/v3/{parent=projects/*/locations/*}/supportedLanguagesZ,\x12*/v3/{parent=projects/*}/supportedLanguages\xda\x41"parent,model,display_language_code\x12\xe1\x01\n\x12\x42\x61tchTranslateText\x12\x36.google.cloud.translation.v3.BatchTranslateTextRequest\x1a\x1d.google.longrunning.Operation"t\x82\xd3\xe4\x93\x02;"6/v3/{parent=projects/*/locations/*}:batchTranslateText:\x01*\xca\x41\x30\n\x16\x42\x61tchTranslateResponse\x12\x16\x42\x61tchTranslateMetadata\x12\xdc\x01\n\x0e\x43reateGlossary\x12\x32.google.cloud.translation.v3.CreateGlossaryRequest\x1a\x1d.googl
e.longrunning.Operation"w\x82\xd3\xe4\x93\x02:"./v3/{parent=projects/*/locations/*}/glossaries:\x08glossary\xda\x41\x0fparent,glossary\xca\x41"\n\x08Glossary\x12\x16\x43reateGlossaryMetadata\x12\xba\x01\n\x0eListGlossaries\x12\x32.google.cloud.translation.v3.ListGlossariesRequest\x1a\x33.google.cloud.translation.v3.ListGlossariesResponse"?\x82\xd3\xe4\x93\x02\x30\x12./v3/{parent=projects/*/locations/*}/glossaries\xda\x41\x06parent\x12\xa4\x01\n\x0bGetGlossary\x12/.google.cloud.translation.v3.GetGlossaryRequest\x1a%.google.cloud.translation.v3.Glossary"=\x82\xd3\xe4\x93\x02\x30\x12./v3/{name=projects/*/locations/*/glossaries/*}\xda\x41\x04name\x12\xd5\x01\n\x0e\x44\x65leteGlossary\x12\x32.google.cloud.translation.v3.DeleteGlossaryRequest\x1a\x1d.google.longrunning.Operation"p\x82\xd3\xe4\x93\x02\x30*./v3/{name=projects/*/locations/*/glossaries/*}\xda\x41\x04name\xca\x41\x30\n\x16\x44\x65leteGlossaryResponse\x12\x16\x44\x65leteGlossaryMetadata\x1a~\xca\x41\x18translate.googleapis.com\xd2\x41`https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-translationB\xd8\x01\n\x1d\x63om.google.cloud.translate.v3B\x17TranslationServiceProtoP\x01ZBgoogle.golang.org/genproto/googleapis/cloud/translate/v3;translate\xf8\x01\x01\xaa\x02\x19Google.Cloud.Translate.V3\xca\x02\x19Google\\Cloud\\Translate\\V3\xea\x02\x1cGoogle::Cloud::Translate::V3b\x06proto3' + ), + dependencies=[ + google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, + google_dot_api_dot_resource__pb2.DESCRIPTOR, + google_dot_longrunning_dot_operations__pb2.DESCRIPTOR, + google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, + ], +) + + +_BATCHTRANSLATEMETADATA_STATE = _descriptor.EnumDescriptor( + name="State", + full_name="google.cloud.translation.v3.BatchTranslateMetadata.State", + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name="STATE_UNSPECIFIED", + 
index=0, + number=0, + serialized_options=None, + type=None, + ), + _descriptor.EnumValueDescriptor( + name="RUNNING", index=1, number=1, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="SUCCEEDED", index=2, number=2, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="FAILED", index=3, number=3, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="CANCELLING", index=4, number=4, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="CANCELLED", index=5, number=5, serialized_options=None, type=None + ), + ], + containing_type=None, + serialized_options=None, + serialized_start=3231, + serialized_end=3332, +) +_sym_db.RegisterEnumDescriptor(_BATCHTRANSLATEMETADATA_STATE) + +_CREATEGLOSSARYMETADATA_STATE = _descriptor.EnumDescriptor( + name="State", + full_name="google.cloud.translation.v3.CreateGlossaryMetadata.State", + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name="STATE_UNSPECIFIED", + index=0, + number=0, + serialized_options=None, + type=None, + ), + _descriptor.EnumValueDescriptor( + name="RUNNING", index=1, number=1, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="SUCCEEDED", index=2, number=2, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="FAILED", index=3, number=3, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="CANCELLING", index=4, number=4, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="CANCELLED", index=5, number=5, serialized_options=None, type=None + ), + ], + containing_type=None, + serialized_options=None, + serialized_start=3231, + serialized_end=3332, +) +_sym_db.RegisterEnumDescriptor(_CREATEGLOSSARYMETADATA_STATE) + +_DELETEGLOSSARYMETADATA_STATE = _descriptor.EnumDescriptor( + name="State", + 
full_name="google.cloud.translation.v3.DeleteGlossaryMetadata.State", + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name="STATE_UNSPECIFIED", + index=0, + number=0, + serialized_options=None, + type=None, + ), + _descriptor.EnumValueDescriptor( + name="RUNNING", index=1, number=1, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="SUCCEEDED", index=2, number=2, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="FAILED", index=3, number=3, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="CANCELLING", index=4, number=4, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="CANCELLED", index=5, number=5, serialized_options=None, type=None + ), + ], + containing_type=None, + serialized_options=None, + serialized_start=3231, + serialized_end=3332, +) +_sym_db.RegisterEnumDescriptor(_DELETEGLOSSARYMETADATA_STATE) + + +_TRANSLATETEXTGLOSSARYCONFIG = _descriptor.Descriptor( + name="TranslateTextGlossaryConfig", + full_name="google.cloud.translation.v3.TranslateTextGlossaryConfig", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="glossary", + full_name="google.cloud.translation.v3.TranslateTextGlossaryConfig.glossary", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="ignore_case", + full_name="google.cloud.translation.v3.TranslateTextGlossaryConfig.ignore_case", + index=1, + number=2, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + 
extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=277, + serialized_end=355, +) + + +_TRANSLATETEXTREQUEST_LABELSENTRY = _descriptor.Descriptor( + name="LabelsEntry", + full_name="google.cloud.translation.v3.TranslateTextRequest.LabelsEntry", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="google.cloud.translation.v3.TranslateTextRequest.LabelsEntry.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.cloud.translation.v3.TranslateTextRequest.LabelsEntry.value", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=_b("8\001"), + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=750, + serialized_end=795, +) + +_TRANSLATETEXTREQUEST = _descriptor.Descriptor( + name="TranslateTextRequest", + full_name="google.cloud.translation.v3.TranslateTextRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="contents", + full_name="google.cloud.translation.v3.TranslateTextRequest.contents", + index=0, + number=1, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + 
default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="mime_type", + full_name="google.cloud.translation.v3.TranslateTextRequest.mime_type", + index=1, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="source_language_code", + full_name="google.cloud.translation.v3.TranslateTextRequest.source_language_code", + index=2, + number=4, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="target_language_code", + full_name="google.cloud.translation.v3.TranslateTextRequest.target_language_code", + index=3, + number=5, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="parent", + full_name="google.cloud.translation.v3.TranslateTextRequest.parent", + index=4, + number=8, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b( + "\340A\002\372A#\n!locations.googleapis.com/Location" + ), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + 
name="model", + full_name="google.cloud.translation.v3.TranslateTextRequest.model", + index=5, + number=6, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="glossary_config", + full_name="google.cloud.translation.v3.TranslateTextRequest.glossary_config", + index=6, + number=7, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="labels", + full_name="google.cloud.translation.v3.TranslateTextRequest.labels", + index=7, + number=10, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[_TRANSLATETEXTREQUEST_LABELSENTRY], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=358, + serialized_end=795, +) + + +_TRANSLATETEXTRESPONSE = _descriptor.Descriptor( + name="TranslateTextResponse", + full_name="google.cloud.translation.v3.TranslateTextResponse", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="translations", + full_name="google.cloud.translation.v3.TranslateTextResponse.translations", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + 
extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="glossary_translations", + full_name="google.cloud.translation.v3.TranslateTextResponse.glossary_translations", + index=1, + number=3, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=798, + serialized_end=958, +) + + +_TRANSLATION = _descriptor.Descriptor( + name="Translation", + full_name="google.cloud.translation.v3.Translation", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="translated_text", + full_name="google.cloud.translation.v3.Translation.translated_text", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="model", + full_name="google.cloud.translation.v3.Translation.model", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="detected_language_code", + full_name="google.cloud.translation.v3.Translation.detected_language_code", + index=2, + number=4, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + 
enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="glossary_config", + full_name="google.cloud.translation.v3.Translation.glossary_config", + index=3, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=961, + serialized_end=1129, +) + + +_DETECTLANGUAGEREQUEST_LABELSENTRY = _descriptor.Descriptor( + name="LabelsEntry", + full_name="google.cloud.translation.v3.DetectLanguageRequest.LabelsEntry", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="google.cloud.translation.v3.DetectLanguageRequest.LabelsEntry.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.cloud.translation.v3.DetectLanguageRequest.LabelsEntry.value", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=_b("8\001"), + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + 
serialized_start=750, + serialized_end=795, +) + +_DETECTLANGUAGEREQUEST = _descriptor.Descriptor( + name="DetectLanguageRequest", + full_name="google.cloud.translation.v3.DetectLanguageRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="parent", + full_name="google.cloud.translation.v3.DetectLanguageRequest.parent", + index=0, + number=5, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b( + "\340A\002\372A#\n!locations.googleapis.com/Location" + ), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="model", + full_name="google.cloud.translation.v3.DetectLanguageRequest.model", + index=1, + number=4, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="content", + full_name="google.cloud.translation.v3.DetectLanguageRequest.content", + index=2, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="mime_type", + full_name="google.cloud.translation.v3.DetectLanguageRequest.mime_type", + index=3, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + 
_descriptor.FieldDescriptor( + name="labels", + full_name="google.cloud.translation.v3.DetectLanguageRequest.labels", + index=4, + number=6, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[_DETECTLANGUAGEREQUEST_LABELSENTRY], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="source", + full_name="google.cloud.translation.v3.DetectLanguageRequest.source", + index=0, + containing_type=None, + fields=[], + ) + ], + serialized_start=1132, + serialized_end=1419, +) + + +_DETECTEDLANGUAGE = _descriptor.Descriptor( + name="DetectedLanguage", + full_name="google.cloud.translation.v3.DetectedLanguage", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="language_code", + full_name="google.cloud.translation.v3.DetectedLanguage.language_code", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="confidence", + full_name="google.cloud.translation.v3.DetectedLanguage.confidence", + index=1, + number=2, + type=2, + cpp_type=6, + label=1, + has_default_value=False, + default_value=float(0), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + 
serialized_start=1421, + serialized_end=1482, +) + + +_DETECTLANGUAGERESPONSE = _descriptor.Descriptor( + name="DetectLanguageResponse", + full_name="google.cloud.translation.v3.DetectLanguageResponse", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="languages", + full_name="google.cloud.translation.v3.DetectLanguageResponse.languages", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1484, + serialized_end=1574, +) + + +_GETSUPPORTEDLANGUAGESREQUEST = _descriptor.Descriptor( + name="GetSupportedLanguagesRequest", + full_name="google.cloud.translation.v3.GetSupportedLanguagesRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="parent", + full_name="google.cloud.translation.v3.GetSupportedLanguagesRequest.parent", + index=0, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b( + "\340A\002\372A#\n!locations.googleapis.com/Location" + ), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="display_language_code", + full_name="google.cloud.translation.v3.GetSupportedLanguagesRequest.display_language_code", + index=1, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + 
serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="model", + full_name="google.cloud.translation.v3.GetSupportedLanguagesRequest.model", + index=2, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1577, + serialized_end=1722, +) + + +_SUPPORTEDLANGUAGES = _descriptor.Descriptor( + name="SupportedLanguages", + full_name="google.cloud.translation.v3.SupportedLanguages", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="languages", + full_name="google.cloud.translation.v3.SupportedLanguages.languages", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1724, + serialized_end=1811, +) + + +_SUPPORTEDLANGUAGE = _descriptor.Descriptor( + name="SupportedLanguage", + full_name="google.cloud.translation.v3.SupportedLanguage", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="language_code", + full_name="google.cloud.translation.v3.SupportedLanguage.language_code", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + 
enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="display_name", + full_name="google.cloud.translation.v3.SupportedLanguage.display_name", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="support_source", + full_name="google.cloud.translation.v3.SupportedLanguage.support_source", + index=2, + number=3, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="support_target", + full_name="google.cloud.translation.v3.SupportedLanguage.support_target", + index=3, + number=4, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1813, + serialized_end=1925, +) + + +_GCSSOURCE = _descriptor.Descriptor( + name="GcsSource", + full_name="google.cloud.translation.v3.GcsSource", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="input_uri", + full_name="google.cloud.translation.v3.GcsSource.input_uri", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + 
message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1927, + serialized_end=1957, +) + + +_INPUTCONFIG = _descriptor.Descriptor( + name="InputConfig", + full_name="google.cloud.translation.v3.InputConfig", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="mime_type", + full_name="google.cloud.translation.v3.InputConfig.mime_type", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="gcs_source", + full_name="google.cloud.translation.v3.InputConfig.gcs_source", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="source", + full_name="google.cloud.translation.v3.InputConfig.source", + index=0, + containing_type=None, + fields=[], + ) + ], + serialized_start=1959, + serialized_end=2068, +) + + +_GCSDESTINATION = _descriptor.Descriptor( + name="GcsDestination", + full_name="google.cloud.translation.v3.GcsDestination", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + 
name="output_uri_prefix", + full_name="google.cloud.translation.v3.GcsDestination.output_uri_prefix", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2070, + serialized_end=2113, +) + + +_OUTPUTCONFIG = _descriptor.Descriptor( + name="OutputConfig", + full_name="google.cloud.translation.v3.OutputConfig", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="gcs_destination", + full_name="google.cloud.translation.v3.OutputConfig.gcs_destination", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="destination", + full_name="google.cloud.translation.v3.OutputConfig.destination", + index=0, + containing_type=None, + fields=[], + ) + ], + serialized_start=2115, + serialized_end=2216, +) + + +_BATCHTRANSLATETEXTREQUEST_MODELSENTRY = _descriptor.Descriptor( + name="ModelsEntry", + full_name="google.cloud.translation.v3.BatchTranslateTextRequest.ModelsEntry", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="google.cloud.translation.v3.BatchTranslateTextRequest.ModelsEntry.key", + index=0, + number=1, + type=9, + 
cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.cloud.translation.v3.BatchTranslateTextRequest.ModelsEntry.value", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=_b("8\001"), + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2794, + serialized_end=2839, +) + +_BATCHTRANSLATETEXTREQUEST_GLOSSARIESENTRY = _descriptor.Descriptor( + name="GlossariesEntry", + full_name="google.cloud.translation.v3.BatchTranslateTextRequest.GlossariesEntry", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="google.cloud.translation.v3.BatchTranslateTextRequest.GlossariesEntry.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.cloud.translation.v3.BatchTranslateTextRequest.GlossariesEntry.value", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + 
nested_types=[], + enum_types=[], + serialized_options=_b("8\001"), + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2841, + serialized_end=2948, +) + +_BATCHTRANSLATETEXTREQUEST_LABELSENTRY = _descriptor.Descriptor( + name="LabelsEntry", + full_name="google.cloud.translation.v3.BatchTranslateTextRequest.LabelsEntry", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="google.cloud.translation.v3.BatchTranslateTextRequest.LabelsEntry.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.cloud.translation.v3.BatchTranslateTextRequest.LabelsEntry.value", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=_b("8\001"), + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=750, + serialized_end=795, +) + +_BATCHTRANSLATETEXTREQUEST = _descriptor.Descriptor( + name="BatchTranslateTextRequest", + full_name="google.cloud.translation.v3.BatchTranslateTextRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="parent", + full_name="google.cloud.translation.v3.BatchTranslateTextRequest.parent", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + 
enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b( + "\340A\002\372A#\n!locations.googleapis.com/Location" + ), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="source_language_code", + full_name="google.cloud.translation.v3.BatchTranslateTextRequest.source_language_code", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="target_language_codes", + full_name="google.cloud.translation.v3.BatchTranslateTextRequest.target_language_codes", + index=2, + number=3, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="models", + full_name="google.cloud.translation.v3.BatchTranslateTextRequest.models", + index=3, + number=4, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="input_configs", + full_name="google.cloud.translation.v3.BatchTranslateTextRequest.input_configs", + index=4, + number=5, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="output_config", + 
full_name="google.cloud.translation.v3.BatchTranslateTextRequest.output_config", + index=5, + number=6, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="glossaries", + full_name="google.cloud.translation.v3.BatchTranslateTextRequest.glossaries", + index=6, + number=7, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="labels", + full_name="google.cloud.translation.v3.BatchTranslateTextRequest.labels", + index=7, + number=9, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[ + _BATCHTRANSLATETEXTREQUEST_MODELSENTRY, + _BATCHTRANSLATETEXTREQUEST_GLOSSARIESENTRY, + _BATCHTRANSLATETEXTREQUEST_LABELSENTRY, + ], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2219, + serialized_end=2995, +) + + +_BATCHTRANSLATEMETADATA = _descriptor.Descriptor( + name="BatchTranslateMetadata", + full_name="google.cloud.translation.v3.BatchTranslateMetadata", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="state", + full_name="google.cloud.translation.v3.BatchTranslateMetadata.state", + index=0, + number=1, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + 
enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="translated_characters", + full_name="google.cloud.translation.v3.BatchTranslateMetadata.translated_characters", + index=1, + number=2, + type=3, + cpp_type=2, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="failed_characters", + full_name="google.cloud.translation.v3.BatchTranslateMetadata.failed_characters", + index=2, + number=3, + type=3, + cpp_type=2, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="total_characters", + full_name="google.cloud.translation.v3.BatchTranslateMetadata.total_characters", + index=3, + number=4, + type=3, + cpp_type=2, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="submit_time", + full_name="google.cloud.translation.v3.BatchTranslateMetadata.submit_time", + index=4, + number=5, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[_BATCHTRANSLATEMETADATA_STATE], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2998, + 
serialized_end=3332, +) + + +_BATCHTRANSLATERESPONSE = _descriptor.Descriptor( + name="BatchTranslateResponse", + full_name="google.cloud.translation.v3.BatchTranslateResponse", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="total_characters", + full_name="google.cloud.translation.v3.BatchTranslateResponse.total_characters", + index=0, + number=1, + type=3, + cpp_type=2, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="translated_characters", + full_name="google.cloud.translation.v3.BatchTranslateResponse.translated_characters", + index=1, + number=2, + type=3, + cpp_type=2, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="failed_characters", + full_name="google.cloud.translation.v3.BatchTranslateResponse.failed_characters", + index=2, + number=3, + type=3, + cpp_type=2, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="submit_time", + full_name="google.cloud.translation.v3.BatchTranslateResponse.submit_time", + index=3, + number=4, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="end_time", + 
full_name="google.cloud.translation.v3.BatchTranslateResponse.end_time", + index=4, + number=5, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=3335, + serialized_end=3538, +) + + +_GLOSSARYINPUTCONFIG = _descriptor.Descriptor( + name="GlossaryInputConfig", + full_name="google.cloud.translation.v3.GlossaryInputConfig", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="gcs_source", + full_name="google.cloud.translation.v3.GlossaryInputConfig.gcs_source", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="source", + full_name="google.cloud.translation.v3.GlossaryInputConfig.source", + index=0, + containing_type=None, + fields=[], + ) + ], + serialized_start=3540, + serialized_end=3633, +) + + +_GLOSSARY_LANGUAGECODEPAIR = _descriptor.Descriptor( + name="LanguageCodePair", + full_name="google.cloud.translation.v3.Glossary.LanguageCodePair", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="source_language_code", + full_name="google.cloud.translation.v3.Glossary.LanguageCodePair.source_language_code", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + 
has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="target_language_code", + full_name="google.cloud.translation.v3.Glossary.LanguageCodePair.target_language_code", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=4032, + serialized_end=4110, +) + +_GLOSSARY_LANGUAGECODESSET = _descriptor.Descriptor( + name="LanguageCodesSet", + full_name="google.cloud.translation.v3.Glossary.LanguageCodesSet", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="language_codes", + full_name="google.cloud.translation.v3.Glossary.LanguageCodesSet.language_codes", + index=0, + number=1, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=4112, + serialized_end=4154, +) + +_GLOSSARY = _descriptor.Descriptor( + name="Glossary", + full_name="google.cloud.translation.v3.Glossary", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + 
full_name="google.cloud.translation.v3.Glossary.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="language_pair", + full_name="google.cloud.translation.v3.Glossary.language_pair", + index=1, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="language_codes_set", + full_name="google.cloud.translation.v3.Glossary.language_codes_set", + index=2, + number=4, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="input_config", + full_name="google.cloud.translation.v3.Glossary.input_config", + index=3, + number=5, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="entry_count", + full_name="google.cloud.translation.v3.Glossary.entry_count", + index=4, + number=6, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\003"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="submit_time", + 
full_name="google.cloud.translation.v3.Glossary.submit_time", + index=5, + number=7, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\003"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="end_time", + full_name="google.cloud.translation.v3.Glossary.end_time", + index=6, + number=8, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\003"), + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[_GLOSSARY_LANGUAGECODEPAIR, _GLOSSARY_LANGUAGECODESSET], + enum_types=[], + serialized_options=_b( + "\352Ab\n!translate.googleapis.com/Glossary\022=projects/{project}/locations/{location}/glossaries/{glossary}" + ), + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="languages", + full_name="google.cloud.translation.v3.Glossary.languages", + index=0, + containing_type=None, + fields=[], + ) + ], + serialized_start=3636, + serialized_end=4270, +) + + +_CREATEGLOSSARYREQUEST = _descriptor.Descriptor( + name="CreateGlossaryRequest", + full_name="google.cloud.translation.v3.CreateGlossaryRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="parent", + full_name="google.cloud.translation.v3.CreateGlossaryRequest.parent", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b( + "\340A\002\372A#\n!locations.googleapis.com/Location" + ), + file=DESCRIPTOR, + ), + 
_descriptor.FieldDescriptor( + name="glossary", + full_name="google.cloud.translation.v3.CreateGlossaryRequest.glossary", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=4273, + serialized_end=4417, +) + + +_GETGLOSSARYREQUEST = _descriptor.Descriptor( + name="GetGlossaryRequest", + full_name="google.cloud.translation.v3.GetGlossaryRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.cloud.translation.v3.GetGlossaryRequest.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b( + "\340A\002\372A#\n!translate.googleapis.com/Glossary" + ), + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=4419, + serialized_end=4496, +) + + +_DELETEGLOSSARYREQUEST = _descriptor.Descriptor( + name="DeleteGlossaryRequest", + full_name="google.cloud.translation.v3.DeleteGlossaryRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.cloud.translation.v3.DeleteGlossaryRequest.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + 
enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b( + "\340A\002\372A#\n!translate.googleapis.com/Glossary" + ), + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=4498, + serialized_end=4578, +) + + +_LISTGLOSSARIESREQUEST = _descriptor.Descriptor( + name="ListGlossariesRequest", + full_name="google.cloud.translation.v3.ListGlossariesRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="parent", + full_name="google.cloud.translation.v3.ListGlossariesRequest.parent", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b( + "\340A\002\372A#\n!locations.googleapis.com/Location" + ), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="page_size", + full_name="google.cloud.translation.v3.ListGlossariesRequest.page_size", + index=1, + number=2, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="page_token", + full_name="google.cloud.translation.v3.ListGlossariesRequest.page_token", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="filter", + 
full_name="google.cloud.translation.v3.ListGlossariesRequest.filter", + index=3, + number=4, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=4581, + serialized_end=4733, +) + + +_LISTGLOSSARIESRESPONSE = _descriptor.Descriptor( + name="ListGlossariesResponse", + full_name="google.cloud.translation.v3.ListGlossariesResponse", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="glossaries", + full_name="google.cloud.translation.v3.ListGlossariesResponse.glossaries", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="next_page_token", + full_name="google.cloud.translation.v3.ListGlossariesResponse.next_page_token", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=4735, + serialized_end=4843, +) + + +_CREATEGLOSSARYMETADATA = _descriptor.Descriptor( + name="CreateGlossaryMetadata", + full_name="google.cloud.translation.v3.CreateGlossaryMetadata", + 
filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.cloud.translation.v3.CreateGlossaryMetadata.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="state", + full_name="google.cloud.translation.v3.CreateGlossaryMetadata.state", + index=1, + number=2, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="submit_time", + full_name="google.cloud.translation.v3.CreateGlossaryMetadata.submit_time", + index=2, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[_CREATEGLOSSARYMETADATA_STATE], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=4846, + serialized_end=5110, +) + + +_DELETEGLOSSARYMETADATA = _descriptor.Descriptor( + name="DeleteGlossaryMetadata", + full_name="google.cloud.translation.v3.DeleteGlossaryMetadata", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.cloud.translation.v3.DeleteGlossaryMetadata.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + 
enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="state", + full_name="google.cloud.translation.v3.DeleteGlossaryMetadata.state", + index=1, + number=2, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="submit_time", + full_name="google.cloud.translation.v3.DeleteGlossaryMetadata.submit_time", + index=2, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[_DELETEGLOSSARYMETADATA_STATE], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=5113, + serialized_end=5377, +) + + +_DELETEGLOSSARYRESPONSE = _descriptor.Descriptor( + name="DeleteGlossaryResponse", + full_name="google.cloud.translation.v3.DeleteGlossaryResponse", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.cloud.translation.v3.DeleteGlossaryResponse.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="submit_time", + full_name="google.cloud.translation.v3.DeleteGlossaryResponse.submit_time", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + 
has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="end_time", + full_name="google.cloud.translation.v3.DeleteGlossaryResponse.end_time", + index=2, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=5380, + serialized_end=5513, +) + +_TRANSLATETEXTREQUEST_LABELSENTRY.containing_type = _TRANSLATETEXTREQUEST +_TRANSLATETEXTREQUEST.fields_by_name[ + "glossary_config" +].message_type = _TRANSLATETEXTGLOSSARYCONFIG +_TRANSLATETEXTREQUEST.fields_by_name[ + "labels" +].message_type = _TRANSLATETEXTREQUEST_LABELSENTRY +_TRANSLATETEXTRESPONSE.fields_by_name["translations"].message_type = _TRANSLATION +_TRANSLATETEXTRESPONSE.fields_by_name[ + "glossary_translations" +].message_type = _TRANSLATION +_TRANSLATION.fields_by_name[ + "glossary_config" +].message_type = _TRANSLATETEXTGLOSSARYCONFIG +_DETECTLANGUAGEREQUEST_LABELSENTRY.containing_type = _DETECTLANGUAGEREQUEST +_DETECTLANGUAGEREQUEST.fields_by_name[ + "labels" +].message_type = _DETECTLANGUAGEREQUEST_LABELSENTRY +_DETECTLANGUAGEREQUEST.oneofs_by_name["source"].fields.append( + _DETECTLANGUAGEREQUEST.fields_by_name["content"] +) +_DETECTLANGUAGEREQUEST.fields_by_name[ + "content" +].containing_oneof = _DETECTLANGUAGEREQUEST.oneofs_by_name["source"] +_DETECTLANGUAGERESPONSE.fields_by_name["languages"].message_type = _DETECTEDLANGUAGE +_SUPPORTEDLANGUAGES.fields_by_name["languages"].message_type = _SUPPORTEDLANGUAGE 
+_INPUTCONFIG.fields_by_name["gcs_source"].message_type = _GCSSOURCE +_INPUTCONFIG.oneofs_by_name["source"].fields.append( + _INPUTCONFIG.fields_by_name["gcs_source"] +) +_INPUTCONFIG.fields_by_name[ + "gcs_source" +].containing_oneof = _INPUTCONFIG.oneofs_by_name["source"] +_OUTPUTCONFIG.fields_by_name["gcs_destination"].message_type = _GCSDESTINATION +_OUTPUTCONFIG.oneofs_by_name["destination"].fields.append( + _OUTPUTCONFIG.fields_by_name["gcs_destination"] +) +_OUTPUTCONFIG.fields_by_name[ + "gcs_destination" +].containing_oneof = _OUTPUTCONFIG.oneofs_by_name["destination"] +_BATCHTRANSLATETEXTREQUEST_MODELSENTRY.containing_type = _BATCHTRANSLATETEXTREQUEST +_BATCHTRANSLATETEXTREQUEST_GLOSSARIESENTRY.fields_by_name[ + "value" +].message_type = _TRANSLATETEXTGLOSSARYCONFIG +_BATCHTRANSLATETEXTREQUEST_GLOSSARIESENTRY.containing_type = _BATCHTRANSLATETEXTREQUEST +_BATCHTRANSLATETEXTREQUEST_LABELSENTRY.containing_type = _BATCHTRANSLATETEXTREQUEST +_BATCHTRANSLATETEXTREQUEST.fields_by_name[ + "models" +].message_type = _BATCHTRANSLATETEXTREQUEST_MODELSENTRY +_BATCHTRANSLATETEXTREQUEST.fields_by_name["input_configs"].message_type = _INPUTCONFIG +_BATCHTRANSLATETEXTREQUEST.fields_by_name["output_config"].message_type = _OUTPUTCONFIG +_BATCHTRANSLATETEXTREQUEST.fields_by_name[ + "glossaries" +].message_type = _BATCHTRANSLATETEXTREQUEST_GLOSSARIESENTRY +_BATCHTRANSLATETEXTREQUEST.fields_by_name[ + "labels" +].message_type = _BATCHTRANSLATETEXTREQUEST_LABELSENTRY +_BATCHTRANSLATEMETADATA.fields_by_name[ + "state" +].enum_type = _BATCHTRANSLATEMETADATA_STATE +_BATCHTRANSLATEMETADATA.fields_by_name[ + "submit_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_BATCHTRANSLATEMETADATA_STATE.containing_type = _BATCHTRANSLATEMETADATA +_BATCHTRANSLATERESPONSE.fields_by_name[ + "submit_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_BATCHTRANSLATERESPONSE.fields_by_name[ + "end_time" +].message_type = 
google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_GLOSSARYINPUTCONFIG.fields_by_name["gcs_source"].message_type = _GCSSOURCE +_GLOSSARYINPUTCONFIG.oneofs_by_name["source"].fields.append( + _GLOSSARYINPUTCONFIG.fields_by_name["gcs_source"] +) +_GLOSSARYINPUTCONFIG.fields_by_name[ + "gcs_source" +].containing_oneof = _GLOSSARYINPUTCONFIG.oneofs_by_name["source"] +_GLOSSARY_LANGUAGECODEPAIR.containing_type = _GLOSSARY +_GLOSSARY_LANGUAGECODESSET.containing_type = _GLOSSARY +_GLOSSARY.fields_by_name["language_pair"].message_type = _GLOSSARY_LANGUAGECODEPAIR +_GLOSSARY.fields_by_name["language_codes_set"].message_type = _GLOSSARY_LANGUAGECODESSET +_GLOSSARY.fields_by_name["input_config"].message_type = _GLOSSARYINPUTCONFIG +_GLOSSARY.fields_by_name[ + "submit_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_GLOSSARY.fields_by_name[ + "end_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_GLOSSARY.oneofs_by_name["languages"].fields.append( + _GLOSSARY.fields_by_name["language_pair"] +) +_GLOSSARY.fields_by_name["language_pair"].containing_oneof = _GLOSSARY.oneofs_by_name[ + "languages" +] +_GLOSSARY.oneofs_by_name["languages"].fields.append( + _GLOSSARY.fields_by_name["language_codes_set"] +) +_GLOSSARY.fields_by_name[ + "language_codes_set" +].containing_oneof = _GLOSSARY.oneofs_by_name["languages"] +_CREATEGLOSSARYREQUEST.fields_by_name["glossary"].message_type = _GLOSSARY +_LISTGLOSSARIESRESPONSE.fields_by_name["glossaries"].message_type = _GLOSSARY +_CREATEGLOSSARYMETADATA.fields_by_name[ + "state" +].enum_type = _CREATEGLOSSARYMETADATA_STATE +_CREATEGLOSSARYMETADATA.fields_by_name[ + "submit_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_CREATEGLOSSARYMETADATA_STATE.containing_type = _CREATEGLOSSARYMETADATA +_DELETEGLOSSARYMETADATA.fields_by_name[ + "state" +].enum_type = _DELETEGLOSSARYMETADATA_STATE +_DELETEGLOSSARYMETADATA.fields_by_name[ + "submit_time" +].message_type = 
google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_DELETEGLOSSARYMETADATA_STATE.containing_type = _DELETEGLOSSARYMETADATA +_DELETEGLOSSARYRESPONSE.fields_by_name[ + "submit_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_DELETEGLOSSARYRESPONSE.fields_by_name[ + "end_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +DESCRIPTOR.message_types_by_name[ + "TranslateTextGlossaryConfig" +] = _TRANSLATETEXTGLOSSARYCONFIG +DESCRIPTOR.message_types_by_name["TranslateTextRequest"] = _TRANSLATETEXTREQUEST +DESCRIPTOR.message_types_by_name["TranslateTextResponse"] = _TRANSLATETEXTRESPONSE +DESCRIPTOR.message_types_by_name["Translation"] = _TRANSLATION +DESCRIPTOR.message_types_by_name["DetectLanguageRequest"] = _DETECTLANGUAGEREQUEST +DESCRIPTOR.message_types_by_name["DetectedLanguage"] = _DETECTEDLANGUAGE +DESCRIPTOR.message_types_by_name["DetectLanguageResponse"] = _DETECTLANGUAGERESPONSE +DESCRIPTOR.message_types_by_name[ + "GetSupportedLanguagesRequest" +] = _GETSUPPORTEDLANGUAGESREQUEST +DESCRIPTOR.message_types_by_name["SupportedLanguages"] = _SUPPORTEDLANGUAGES +DESCRIPTOR.message_types_by_name["SupportedLanguage"] = _SUPPORTEDLANGUAGE +DESCRIPTOR.message_types_by_name["GcsSource"] = _GCSSOURCE +DESCRIPTOR.message_types_by_name["InputConfig"] = _INPUTCONFIG +DESCRIPTOR.message_types_by_name["GcsDestination"] = _GCSDESTINATION +DESCRIPTOR.message_types_by_name["OutputConfig"] = _OUTPUTCONFIG +DESCRIPTOR.message_types_by_name[ + "BatchTranslateTextRequest" +] = _BATCHTRANSLATETEXTREQUEST +DESCRIPTOR.message_types_by_name["BatchTranslateMetadata"] = _BATCHTRANSLATEMETADATA +DESCRIPTOR.message_types_by_name["BatchTranslateResponse"] = _BATCHTRANSLATERESPONSE +DESCRIPTOR.message_types_by_name["GlossaryInputConfig"] = _GLOSSARYINPUTCONFIG +DESCRIPTOR.message_types_by_name["Glossary"] = _GLOSSARY +DESCRIPTOR.message_types_by_name["CreateGlossaryRequest"] = _CREATEGLOSSARYREQUEST 
+DESCRIPTOR.message_types_by_name["GetGlossaryRequest"] = _GETGLOSSARYREQUEST +DESCRIPTOR.message_types_by_name["DeleteGlossaryRequest"] = _DELETEGLOSSARYREQUEST +DESCRIPTOR.message_types_by_name["ListGlossariesRequest"] = _LISTGLOSSARIESREQUEST +DESCRIPTOR.message_types_by_name["ListGlossariesResponse"] = _LISTGLOSSARIESRESPONSE +DESCRIPTOR.message_types_by_name["CreateGlossaryMetadata"] = _CREATEGLOSSARYMETADATA +DESCRIPTOR.message_types_by_name["DeleteGlossaryMetadata"] = _DELETEGLOSSARYMETADATA +DESCRIPTOR.message_types_by_name["DeleteGlossaryResponse"] = _DELETEGLOSSARYRESPONSE +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +TranslateTextGlossaryConfig = _reflection.GeneratedProtocolMessageType( + "TranslateTextGlossaryConfig", + (_message.Message,), + dict( + DESCRIPTOR=_TRANSLATETEXTGLOSSARYCONFIG, + __module__="google.cloud.translation_v3.proto.translation_service_pb2", + __doc__="""Configures which glossary should be used for a specific target language, + and defines options for applying that glossary. + + + Attributes: + glossary: + Required. Specifies the glossary used for this translation. + Use this format: projects/\ */locations/*/glossaries/\* + ignore_case: + Optional. Indicates match is case-insensitive. Default value + is false if missing. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.TranslateTextGlossaryConfig) + ), +) +_sym_db.RegisterMessage(TranslateTextGlossaryConfig) + +TranslateTextRequest = _reflection.GeneratedProtocolMessageType( + "TranslateTextRequest", + (_message.Message,), + dict( + LabelsEntry=_reflection.GeneratedProtocolMessageType( + "LabelsEntry", + (_message.Message,), + dict( + DESCRIPTOR=_TRANSLATETEXTREQUEST_LABELSENTRY, + __module__="google.cloud.translation_v3.proto.translation_service_pb2" + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.TranslateTextRequest.LabelsEntry) + ), + ), + DESCRIPTOR=_TRANSLATETEXTREQUEST, + __module__="google.cloud.translation_v3.proto.translation_service_pb2", + __doc__="""The request message for synchronous translation. + + + Attributes: + contents: + Required. The content of the input in string format. We + recommend the total content be less than 30k codepoints. Use + BatchTranslateText for larger text. + mime_type: + Optional. The format of the source text, for example, + "text/html", "text/plain". If left blank, the MIME type + defaults to "text/html". + source_language_code: + Optional. The BCP-47 language code of the input text if known, + for example, "en-US" or "sr-Latn". Supported language codes + are listed in Language Support. If the source language isn't + specified, the API attempts to identify the source language + automatically and returns the source language within the + response. + target_language_code: + Required. The BCP-47 language code to use for translation of + the input text, set to one of the language codes listed in + Language Support. + parent: + Required. Project or location to make a call. Must refer to a + caller's project. Format: ``projects/{project-number-or-id}`` + or ``projects/{project-number-or-id}/locations/{location- + id}``. For global calls, use ``projects/{project-number-or- + id}/locations/global`` or ``projects/{project-number-or-id}``. 
+ Non-global location is required for requests using AutoML + models or custom glossaries. Models and glossaries must be + within the same region (have same location-id), otherwise an + INVALID\_ARGUMENT (400) error is returned. + model: + Optional. The ``model`` type requested for this translation. + The format depends on model type: - AutoML Translation + models: ``projects/{project-number-or- + id}/locations/{location-id}/models/{model-id}`` - General + (built-in) models: ``projects/{project-number-or- + id}/locations/{location-id}/models/general/nmt``, + ``projects/{project-number-or-id}/locations/{location- + id}/models/general/base`` For global (non-regionalized) + requests, use ``location-id`` ``global``. For example, + ``projects/{project-number-or- + id}/locations/global/models/general/nmt``. If missing, the + system decides which google base model to use. + glossary_config: + Optional. Glossary to be applied. The glossary must be within + the same region (have the same location-id) as the model, + otherwise an INVALID\_ARGUMENT (400) error is returned. + labels: + Optional. The labels with user-defined metadata for the + request. Label keys and values can be no longer than 63 + characters (Unicode codepoints), can only contain lowercase + letters, numeric characters, underscores and dashes. + International characters are allowed. Label values are + optional. Label keys must start with a letter. See + https://cloud.google.com/translate/docs/labels for more + information. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.TranslateTextRequest) + ), +) +_sym_db.RegisterMessage(TranslateTextRequest) +_sym_db.RegisterMessage(TranslateTextRequest.LabelsEntry) + +TranslateTextResponse = _reflection.GeneratedProtocolMessageType( + "TranslateTextResponse", + (_message.Message,), + dict( + DESCRIPTOR=_TRANSLATETEXTRESPONSE, + __module__="google.cloud.translation_v3.proto.translation_service_pb2", + __doc__=""" + Attributes: + translations: + Text translation responses with no glossary applied. This + field has the same length as [``contents``][google.cloud.trans + lation.v3.TranslateTextRequest.contents]. + glossary_translations: + Text translation responses if a glossary is provided in the + request. This can be the same as [``translations``][google.clo + ud.translation.v3.TranslateTextResponse.translations] if no + terms apply. This field has the same length as [``contents``][ + google.cloud.translation.v3.TranslateTextRequest.contents]. + """, + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.TranslateTextResponse) + ), +) +_sym_db.RegisterMessage(TranslateTextResponse) + +Translation = _reflection.GeneratedProtocolMessageType( + "Translation", + (_message.Message,), + dict( + DESCRIPTOR=_TRANSLATION, + __module__="google.cloud.translation_v3.proto.translation_service_pb2", + __doc__="""A single translation response. + + + Attributes: + translated_text: + Text translated into the target language. + model: + Only present when ``model`` is present in the request. + ``model`` here is normalized to have project number. For + example: If the ``model`` requested in TranslationTextRequest + is ``projects/{project-id}/locations/{location- + id}/models/general/nmt`` then ``model`` here would be + normalized to ``projects/{project-number}/locations/{location- + id}/models/general/nmt``. 
+ detected_language_code: + The BCP-47 language code of source text in the initial + request, detected automatically, if no source language was + passed within the initial request. If the source language was + passed, auto-detection of the language does not occur and this + field is empty. + glossary_config: + The ``glossary_config`` used for this translation. + """, + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.Translation) + ), +) +_sym_db.RegisterMessage(Translation) + +DetectLanguageRequest = _reflection.GeneratedProtocolMessageType( + "DetectLanguageRequest", + (_message.Message,), + dict( + LabelsEntry=_reflection.GeneratedProtocolMessageType( + "LabelsEntry", + (_message.Message,), + dict( + DESCRIPTOR=_DETECTLANGUAGEREQUEST_LABELSENTRY, + __module__="google.cloud.translation_v3.proto.translation_service_pb2" + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.DetectLanguageRequest.LabelsEntry) + ), + ), + DESCRIPTOR=_DETECTLANGUAGEREQUEST, + __module__="google.cloud.translation_v3.proto.translation_service_pb2", + __doc__="""The request message for language detection. + + + Attributes: + parent: + Required. Project or location to make a call. Must refer to a + caller's project. Format: ``projects/{project-number-or- + id}/locations/{location-id}`` or ``projects/{project-number- + or-id}``. For global calls, use ``projects/{project-number- + or-id}/locations/global`` or ``projects/{project-number-or- + id}``. Only models within the same region (has same location- + id) can be used. Otherwise an INVALID\_ARGUMENT (400) error is + returned. + model: + Optional. The language detection model to be used. Format: + ``projects/{project-number-or-id}/locations/{location- + id}/models/language-detection/{model-id}`` Only one language + detection model is currently supported: ``projects/{project- + number-or-id}/locations/{location-id}/models/language- + detection/default``. If not specified, the default model is + used. 
+ source: + Required. The source of the document from which to detect the + language. + content: + The content of the input stored as a string. + mime_type: + Optional. The format of the source text, for example, + "text/html", "text/plain". If left blank, the MIME type + defaults to "text/html". + labels: + Optional. The labels with user-defined metadata for the + request. Label keys and values can be no longer than 63 + characters (Unicode codepoints), can only contain lowercase + letters, numeric characters, underscores and dashes. + International characters are allowed. Label values are + optional. Label keys must start with a letter. See + https://cloud.google.com/translate/docs/labels for more + information. + """, + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.DetectLanguageRequest) + ), +) +_sym_db.RegisterMessage(DetectLanguageRequest) +_sym_db.RegisterMessage(DetectLanguageRequest.LabelsEntry) + +DetectedLanguage = _reflection.GeneratedProtocolMessageType( + "DetectedLanguage", + (_message.Message,), + dict( + DESCRIPTOR=_DETECTEDLANGUAGE, + __module__="google.cloud.translation_v3.proto.translation_service_pb2", + __doc__="""The response message for language detection. + + + Attributes: + language_code: + The BCP-47 language code of source content in the request, + detected automatically. + confidence: + The confidence of the detection result for this language. + """, + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.DetectedLanguage) + ), +) +_sym_db.RegisterMessage(DetectedLanguage) + +DetectLanguageResponse = _reflection.GeneratedProtocolMessageType( + "DetectLanguageResponse", + (_message.Message,), + dict( + DESCRIPTOR=_DETECTLANGUAGERESPONSE, + __module__="google.cloud.translation_v3.proto.translation_service_pb2", + __doc__="""The response message for language detection. + + + Attributes: + languages: + A list of detected languages sorted by detection confidence in + descending order. 
The most probable language first. + """, + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.DetectLanguageResponse) + ), +) +_sym_db.RegisterMessage(DetectLanguageResponse) + +GetSupportedLanguagesRequest = _reflection.GeneratedProtocolMessageType( + "GetSupportedLanguagesRequest", + (_message.Message,), + dict( + DESCRIPTOR=_GETSUPPORTEDLANGUAGESREQUEST, + __module__="google.cloud.translation_v3.proto.translation_service_pb2", + __doc__="""The request message for discovering supported languages. + + + Attributes: + parent: + Required. Project or location to make a call. Must refer to a + caller's project. Format: ``projects/{project-number-or-id}`` + or ``projects/{project-number-or-id}/locations/{location- + id}``. For global calls, use ``projects/{project-number-or- + id}/locations/global`` or ``projects/{project-number-or-id}``. + Non-global location is required for AutoML models. Only + models within the same region (have same location-id) can be + used, otherwise an INVALID\_ARGUMENT (400) error is returned. + display_language_code: + Optional. The language to use to return localized, human + readable names of supported languages. If missing, then + display names are not returned in a response. + model: + Optional. Get supported languages of this model. The format + depends on model type: - AutoML Translation models: + ``projects/{project-number-or-id}/locations/{location- + id}/models/{model-id}`` - General (built-in) models: + ``projects/{project-number-or-id}/locations/{location- + id}/models/general/nmt``, ``projects/{project-number-or- + id}/locations/{location-id}/models/general/base`` Returns + languages supported by the specified model. If missing, we get + supported languages of Google general base (PBMT) model. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.GetSupportedLanguagesRequest) + ), +) +_sym_db.RegisterMessage(GetSupportedLanguagesRequest) + +SupportedLanguages = _reflection.GeneratedProtocolMessageType( + "SupportedLanguages", + (_message.Message,), + dict( + DESCRIPTOR=_SUPPORTEDLANGUAGES, + __module__="google.cloud.translation_v3.proto.translation_service_pb2", + __doc__="""The response message for discovering supported languages. + + + Attributes: + languages: + A list of supported language responses. This list contains an + entry for each language the Translation API supports. + """, + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.SupportedLanguages) + ), +) +_sym_db.RegisterMessage(SupportedLanguages) + +SupportedLanguage = _reflection.GeneratedProtocolMessageType( + "SupportedLanguage", + (_message.Message,), + dict( + DESCRIPTOR=_SUPPORTEDLANGUAGE, + __module__="google.cloud.translation_v3.proto.translation_service_pb2", + __doc__="""A single supported language response corresponds to information related + to one supported language. + + + Attributes: + language_code: + Supported language code, generally consisting of its ISO 639-1 + identifier, for example, 'en', 'ja'. In certain cases, BCP-47 + codes including language and region identifiers are returned + (for example, 'zh-TW' and 'zh-CN') + display_name: + Human readable name of the language localized in the display + language specified in the request. + support_source: + Can be used as source language. + support_target: + Can be used as target language. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.SupportedLanguage) + ), +) +_sym_db.RegisterMessage(SupportedLanguage) + +GcsSource = _reflection.GeneratedProtocolMessageType( + "GcsSource", + (_message.Message,), + dict( + DESCRIPTOR=_GCSSOURCE, + __module__="google.cloud.translation_v3.proto.translation_service_pb2", + __doc__="""The Google Cloud Storage location for the input content. + + + Attributes: + input_uri: + Required. Source data URI. For example, + ``gs://my_bucket/my_object``. + """, + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.GcsSource) + ), +) +_sym_db.RegisterMessage(GcsSource) + +InputConfig = _reflection.GeneratedProtocolMessageType( + "InputConfig", + (_message.Message,), + dict( + DESCRIPTOR=_INPUTCONFIG, + __module__="google.cloud.translation_v3.proto.translation_service_pb2", + __doc__="""Input configuration for BatchTranslateText request. + + + Attributes: + mime_type: + Optional. Can be "text/plain" or "text/html". For ``.tsv``, + "text/html" is used if mime\_type is missing. For ``.html``, + this field must be "text/html" or empty. For ``.txt``, this + field must be "text/plain" or empty. + source: + Required. Specify the input. + gcs_source: + Required. Google Cloud Storage location for the source input. + This can be a single file (for example, ``gs://translation- + test/input.tsv``) or a wildcard (for example, + ``gs://translation-test/*``). If a file extension is ``.tsv``, + it can contain either one or two columns. The first column + (optional) is the id of the text request. If the first column + is missing, we use the row number (0-based) from the input + file as the ID in the output file. The second column is the + actual text to be translated. We recommend each row be <= 10K + Unicode codepoints, otherwise an error might be returned. Note + that the input tsv must be RFC 4180 compliant. 
You could use + https://github.com/Clever/csvlint to check potential + formatting errors in your tsv file. csvlint --delimiter=':raw- + latex:`\t`' your\_input\_file.tsv The other supported file + extensions are ``.txt`` or ``.html``, which is treated as a + single large chunk of text. + """, + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.InputConfig) + ), +) +_sym_db.RegisterMessage(InputConfig) + +GcsDestination = _reflection.GeneratedProtocolMessageType( + "GcsDestination", + (_message.Message,), + dict( + DESCRIPTOR=_GCSDESTINATION, + __module__="google.cloud.translation_v3.proto.translation_service_pb2", + __doc__="""The Google Cloud Storage location for the output content. + + + Attributes: + output_uri_prefix: + Required. There must be no files under 'output\_uri\_prefix'. + 'output\_uri\_prefix' must end with "/" and start with + "gs://", otherwise an INVALID\_ARGUMENT (400) error is + returned. + """, + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.GcsDestination) + ), +) +_sym_db.RegisterMessage(GcsDestination) + +OutputConfig = _reflection.GeneratedProtocolMessageType( + "OutputConfig", + (_message.Message,), + dict( + DESCRIPTOR=_OUTPUTCONFIG, + __module__="google.cloud.translation_v3.proto.translation_service_pb2", + __doc__="""Output configuration for BatchTranslateText request. + + + Attributes: + destination: + Required. The destination of output. + gcs_destination: + Google Cloud Storage destination for output content. For every + single input file (for example, gs://a/b/c.[extension]), we + generate at most 2 \* n output files. (n is the # of + target\_language\_codes in the BatchTranslateTextRequest). + Output files (tsv) generated are compliant with RFC 4180 + except that record delimiters are ``\\\\n`` instead of + ``\\\\r\\\\n``. We don't provide any way to + change record delimiters. 
While the input files are being + processed, we write/update an index file 'index.csv' under + 'output\_uri\_prefix' (for example, gs://translation- + test/index.csv) The index file is generated/updated as new + files are being translated. The format is: input\_file,target + \_language\_code,translations\_file,errors\_file, + glossary\_translations\_file,glossary\_errors\_file + input\_file is one file we matched using + gcs\_source.input\_uri. target\_language\_code is provided in + the request. translations\_file contains the translations. + (details provided below) errors\_file contains the errors + during processing of the file. (details below). Both + translations\_file and errors\_file could be empty strings if + we have no content to output. glossary\_translations\_file and + glossary\_errors\_file are always empty strings if the + input\_file is tsv. They could also be empty if we have no + content to output. Once a row is present in index.csv, the + input/output matching never changes. Callers should also + expect all the content in input\_file are processed and ready + to be consumed (that is, no partial output file is written). + The format of translations\_file (for target language code + 'trg') is: gs://translation\_test/a\_b\_c\_'trg'\_translations + .[extension] If the input file extension is tsv, the output + has the following columns: Column 1: ID of the request + provided in the input, if it's not provided in the input, then + the input row number is used (0-based). Column 2: source + sentence. Column 3: translation without applying a glossary. + Empty string if there is an error. Column 4 (only present if a + glossary is provided in the request): translation after + applying the glossary. Empty string if there is an error + applying the glossary. Could be same string as column 3 if + there is no glossary applied. If input file extension is a + txt or html, the translation is directly written to the output + file. 
If glossary is requested, a separate + glossary\_translations\_file has format of gs://translation\_t + est/a\_b\_c\_'trg'\_glossary\_translations.[extension] The + format of errors file (for target language code 'trg') is: + gs://translation\_test/a\_b\_c\_'trg'\_errors.[extension] If + the input file extension is tsv, errors\_file contains the + following: Column 1: ID of the request provided in the input, + if it's not provided in the input, then the input row number + is used (0-based). Column 2: source sentence. Column 3: Error + detail for the translation. Could be empty. Column 4 (only + present if a glossary is provided in the request): Error when + applying the glossary. If the input file extension is txt or + html, glossary\_error\_file will be generated that contains + error details. glossary\_error\_file has format of gs://transl + ation\_test/a\_b\_c\_'trg'\_glossary\_errors.[extension] + """, + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.OutputConfig) + ), +) +_sym_db.RegisterMessage(OutputConfig) + +BatchTranslateTextRequest = _reflection.GeneratedProtocolMessageType( + "BatchTranslateTextRequest", + (_message.Message,), + dict( + ModelsEntry=_reflection.GeneratedProtocolMessageType( + "ModelsEntry", + (_message.Message,), + dict( + DESCRIPTOR=_BATCHTRANSLATETEXTREQUEST_MODELSENTRY, + __module__="google.cloud.translation_v3.proto.translation_service_pb2" + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.BatchTranslateTextRequest.ModelsEntry) + ), + ), + GlossariesEntry=_reflection.GeneratedProtocolMessageType( + "GlossariesEntry", + (_message.Message,), + dict( + DESCRIPTOR=_BATCHTRANSLATETEXTREQUEST_GLOSSARIESENTRY, + __module__="google.cloud.translation_v3.proto.translation_service_pb2" + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.BatchTranslateTextRequest.GlossariesEntry) + ), + ), + LabelsEntry=_reflection.GeneratedProtocolMessageType( + "LabelsEntry", + (_message.Message,), + 
dict( + DESCRIPTOR=_BATCHTRANSLATETEXTREQUEST_LABELSENTRY, + __module__="google.cloud.translation_v3.proto.translation_service_pb2" + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.BatchTranslateTextRequest.LabelsEntry) + ), + ), + DESCRIPTOR=_BATCHTRANSLATETEXTREQUEST, + __module__="google.cloud.translation_v3.proto.translation_service_pb2", + __doc__="""The batch translation request. + + + Attributes: + parent: + Required. Location to make a call. Must refer to a caller's + project. Format: ``projects/{project-number-or- + id}/locations/{location-id}``. The ``global`` location is not + supported for batch translation. Only AutoML Translation + models or glossaries within the same region (have the same + location-id) can be used, otherwise an INVALID\_ARGUMENT (400) + error is returned. + source_language_code: + Required. Source language code. + target_language_codes: + Required. Specify up to 10 language codes here. + models: + Optional. The models to use for translation. Map's key is + target language code. Map's value is model name. Value can be + a built-in general model, or an AutoML Translation model. The + value format depends on model type: - AutoML Translation + models: ``projects/{project-number-or- + id}/locations/{location-id}/models/{model-id}`` - General + (built-in) models: ``projects/{project-number-or- + id}/locations/{location-id}/models/general/nmt``, + ``projects/{project-number-or-id}/locations/{location- + id}/models/general/base`` If the map is empty or a specific + model is not requested for a language pair, then default + google model (nmt) is used. + input_configs: + Required. Input configurations. The total number of files + matched should be <= 1000. The total content size should be <= + 100M Unicode codepoints. The files must use UTF-8 encoding. + output_config: + Required. Output configuration. If 2 input configs match to + the same file (that is, same input path), we don't generate + output for duplicate inputs. 
+ glossaries: + Optional. Glossaries to be applied for translation. It's keyed + by target language code. + labels: + Optional. The labels with user-defined metadata for the + request. Label keys and values can be no longer than 63 + characters (Unicode codepoints), can only contain lowercase + letters, numeric characters, underscores and dashes. + International characters are allowed. Label values are + optional. Label keys must start with a letter. See + https://cloud.google.com/translate/docs/labels for more + information. + """, + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.BatchTranslateTextRequest) + ), +) +_sym_db.RegisterMessage(BatchTranslateTextRequest) +_sym_db.RegisterMessage(BatchTranslateTextRequest.ModelsEntry) +_sym_db.RegisterMessage(BatchTranslateTextRequest.GlossariesEntry) +_sym_db.RegisterMessage(BatchTranslateTextRequest.LabelsEntry) + +BatchTranslateMetadata = _reflection.GeneratedProtocolMessageType( + "BatchTranslateMetadata", + (_message.Message,), + dict( + DESCRIPTOR=_BATCHTRANSLATEMETADATA, + __module__="google.cloud.translation_v3.proto.translation_service_pb2", + __doc__="""State metadata for the batch translation operation. + + + Attributes: + state: + The state of the operation. + translated_characters: + Number of successfully translated characters so far (Unicode + codepoints). + failed_characters: + Number of characters that have failed to process so far + (Unicode codepoints). + total_characters: + Total number of characters (Unicode codepoints). This is the + total number of codepoints from input files times the number + of target languages and appears here shortly after the call is + submitted. + submit_time: + Time when the operation was submitted. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.BatchTranslateMetadata) + ), +) +_sym_db.RegisterMessage(BatchTranslateMetadata) + +BatchTranslateResponse = _reflection.GeneratedProtocolMessageType( + "BatchTranslateResponse", + (_message.Message,), + dict( + DESCRIPTOR=_BATCHTRANSLATERESPONSE, + __module__="google.cloud.translation_v3.proto.translation_service_pb2", + __doc__="""Stored in the + [google.longrunning.Operation.response][google.longrunning.Operation.response] + field returned by BatchTranslateText if at least one sentence is + translated successfully. + + + Attributes: + total_characters: + Total number of characters (Unicode codepoints). + translated_characters: + Number of successfully translated characters (Unicode + codepoints). + failed_characters: + Number of characters that have failed to process (Unicode + codepoints). + submit_time: + Time when the operation was submitted. + end_time: + The time when the operation is finished and [google.longrunnin + g.Operation.done][google.longrunning.Operation.done] is set to + true. + """, + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.BatchTranslateResponse) + ), +) +_sym_db.RegisterMessage(BatchTranslateResponse) + +GlossaryInputConfig = _reflection.GeneratedProtocolMessageType( + "GlossaryInputConfig", + (_message.Message,), + dict( + DESCRIPTOR=_GLOSSARYINPUTCONFIG, + __module__="google.cloud.translation_v3.proto.translation_service_pb2", + __doc__="""Input configuration for glossaries. + + + Attributes: + source: + Required. Specify the input. + gcs_source: + Required. Google Cloud Storage location of glossary data. File + format is determined based on the filename extension. API + returns [google.rpc.Code.INVALID\_ARGUMENT] for unsupported + URI-s and file formats. Wildcards are not allowed. 
This must + be a single file in one of the following formats: For + unidirectional glossaries: - TSV/CSV (``.tsv``/``.csv``): 2 + column file, tab- or comma-separated. The first column is + source text. The second column is target text. The file + must not contain headers. That is, the first row is data, + not column names. - TMX (``.tmx``): TMX file with parallel + data defining source/target term pairs. For equivalent + term sets glossaries: - CSV (``.csv``): Multi-column CSV + file defining equivalent glossary terms in multiple + languages. The format is defined for Google Translation + Toolkit and documented in `Use a glossary `__. + """, + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.GlossaryInputConfig) + ), +) +_sym_db.RegisterMessage(GlossaryInputConfig) + +Glossary = _reflection.GeneratedProtocolMessageType( + "Glossary", + (_message.Message,), + dict( + LanguageCodePair=_reflection.GeneratedProtocolMessageType( + "LanguageCodePair", + (_message.Message,), + dict( + DESCRIPTOR=_GLOSSARY_LANGUAGECODEPAIR, + __module__="google.cloud.translation_v3.proto.translation_service_pb2", + __doc__="""Used with unidirectional glossaries. + + + Attributes: + source_language_code: + Required. The BCP-47 language code of the input text, for + example, "en-US". Expected to be an exact match for + GlossaryTerm.language\_code. + target_language_code: + Required. The BCP-47 language code for translation output, for + example, "zh-CN". Expected to be an exact match for + GlossaryTerm.language\_code. + """, + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.Glossary.LanguageCodePair) + ), + ), + LanguageCodesSet=_reflection.GeneratedProtocolMessageType( + "LanguageCodesSet", + (_message.Message,), + dict( + DESCRIPTOR=_GLOSSARY_LANGUAGECODESSET, + __module__="google.cloud.translation_v3.proto.translation_service_pb2", + __doc__="""Used with equivalent term set glossaries. 
+ + + Attributes: + language_codes: + The BCP-47 language code(s) for terms defined in the glossary. + All entries are unique. The list contains at least two + entries. Expected to be an exact match for + GlossaryTerm.language\_code. + """, + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.Glossary.LanguageCodesSet) + ), + ), + DESCRIPTOR=_GLOSSARY, + __module__="google.cloud.translation_v3.proto.translation_service_pb2", + __doc__="""Represents a glossary built from user provided data. + + + Attributes: + name: + Required. The resource name of the glossary. Glossary names + have the form ``projects/{project-number-or- + id}/locations/{location-id}/glossaries/{glossary-id}``. + languages: + Languages supported by the glossary. + language_pair: + Used with unidirectional glossaries. + language_codes_set: + Used with equivalent term set glossaries. + input_config: + Required. Provides examples to build the glossary from. Total + glossary must not exceed 10M Unicode codepoints. + entry_count: + Output only. The number of entries defined in the glossary. + submit_time: + Output only. When CreateGlossary was called. + end_time: + Output only. When the glossary creation was finished. + """, + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.Glossary) + ), +) +_sym_db.RegisterMessage(Glossary) +_sym_db.RegisterMessage(Glossary.LanguageCodePair) +_sym_db.RegisterMessage(Glossary.LanguageCodesSet) + +CreateGlossaryRequest = _reflection.GeneratedProtocolMessageType( + "CreateGlossaryRequest", + (_message.Message,), + dict( + DESCRIPTOR=_CREATEGLOSSARYREQUEST, + __module__="google.cloud.translation_v3.proto.translation_service_pb2", + __doc__="""Request message for CreateGlossary. + + + Attributes: + parent: + Required. The project name. + glossary: + Required. The glossary to create. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.CreateGlossaryRequest) + ), +) +_sym_db.RegisterMessage(CreateGlossaryRequest) + +GetGlossaryRequest = _reflection.GeneratedProtocolMessageType( + "GetGlossaryRequest", + (_message.Message,), + dict( + DESCRIPTOR=_GETGLOSSARYREQUEST, + __module__="google.cloud.translation_v3.proto.translation_service_pb2", + __doc__="""Request message for GetGlossary. + + + Attributes: + name: + Required. The name of the glossary to retrieve. + """, + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.GetGlossaryRequest) + ), +) +_sym_db.RegisterMessage(GetGlossaryRequest) + +DeleteGlossaryRequest = _reflection.GeneratedProtocolMessageType( + "DeleteGlossaryRequest", + (_message.Message,), + dict( + DESCRIPTOR=_DELETEGLOSSARYREQUEST, + __module__="google.cloud.translation_v3.proto.translation_service_pb2", + __doc__="""Request message for DeleteGlossary. + + + Attributes: + name: + Required. The name of the glossary to delete. + """, + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.DeleteGlossaryRequest) + ), +) +_sym_db.RegisterMessage(DeleteGlossaryRequest) + +ListGlossariesRequest = _reflection.GeneratedProtocolMessageType( + "ListGlossariesRequest", + (_message.Message,), + dict( + DESCRIPTOR=_LISTGLOSSARIESREQUEST, + __module__="google.cloud.translation_v3.proto.translation_service_pb2", + __doc__="""Request message for ListGlossaries. + + + Attributes: + parent: + Required. The name of the project from which to list all of + the glossaries. + page_size: + Optional. Requested page size. The server may return fewer + glossaries than requested. If unspecified, the server picks an + appropriate default. + page_token: + Optional. A token identifying a page of results the server + should return. Typically, this is the value of + [ListGlossariesResponse.next\_page\_token] returned from the + previous call to ``ListGlossaries`` method. 
The first page is + returned if ``page_token``\ is empty or missing. + filter: + Optional. Filter specifying constraints of a list operation. + Filtering is not supported yet, and the parameter currently + has no effect. If missing, no filtering is performed. + """, + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.ListGlossariesRequest) + ), +) +_sym_db.RegisterMessage(ListGlossariesRequest) + +ListGlossariesResponse = _reflection.GeneratedProtocolMessageType( + "ListGlossariesResponse", + (_message.Message,), + dict( + DESCRIPTOR=_LISTGLOSSARIESRESPONSE, + __module__="google.cloud.translation_v3.proto.translation_service_pb2", + __doc__="""Response message for ListGlossaries. + + + Attributes: + glossaries: + The list of glossaries for a project. + next_page_token: + A token to retrieve a page of results. Pass this value in the + [ListGlossariesRequest.page\_token] field in the subsequent + call to ``ListGlossaries`` method to retrieve the next page of + results. + """, + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.ListGlossariesResponse) + ), +) +_sym_db.RegisterMessage(ListGlossariesResponse) + +CreateGlossaryMetadata = _reflection.GeneratedProtocolMessageType( + "CreateGlossaryMetadata", + (_message.Message,), + dict( + DESCRIPTOR=_CREATEGLOSSARYMETADATA, + __module__="google.cloud.translation_v3.proto.translation_service_pb2", + __doc__="""Stored in the + [google.longrunning.Operation.metadata][google.longrunning.Operation.metadata] + field returned by CreateGlossary. + + + Attributes: + name: + The name of the glossary that is being created. + state: + The current state of the glossary creation operation. + submit_time: + The time when the operation was submitted to the server. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.CreateGlossaryMetadata) + ), +) +_sym_db.RegisterMessage(CreateGlossaryMetadata) + +DeleteGlossaryMetadata = _reflection.GeneratedProtocolMessageType( + "DeleteGlossaryMetadata", + (_message.Message,), + dict( + DESCRIPTOR=_DELETEGLOSSARYMETADATA, + __module__="google.cloud.translation_v3.proto.translation_service_pb2", + __doc__="""Stored in the + [google.longrunning.Operation.metadata][google.longrunning.Operation.metadata] + field returned by DeleteGlossary. + + + Attributes: + name: + The name of the glossary that is being deleted. + state: + The current state of the glossary deletion operation. + submit_time: + The time when the operation was submitted to the server. + """, + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.DeleteGlossaryMetadata) + ), +) +_sym_db.RegisterMessage(DeleteGlossaryMetadata) + +DeleteGlossaryResponse = _reflection.GeneratedProtocolMessageType( + "DeleteGlossaryResponse", + (_message.Message,), + dict( + DESCRIPTOR=_DELETEGLOSSARYRESPONSE, + __module__="google.cloud.translation_v3.proto.translation_service_pb2", + __doc__="""Stored in the + [google.longrunning.Operation.response][google.longrunning.Operation.response] + field returned by DeleteGlossary. + + + Attributes: + name: + The name of the deleted glossary. + submit_time: + The time when the operation was submitted to the server. + end_time: + The time when the glossary deletion is finished and [google.lo + ngrunning.Operation.done][google.longrunning.Operation.done] + is set to true. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.DeleteGlossaryResponse) + ), +) +_sym_db.RegisterMessage(DeleteGlossaryResponse) + + +DESCRIPTOR._options = None +_TRANSLATETEXTGLOSSARYCONFIG.fields_by_name["glossary"]._options = None +_TRANSLATETEXTGLOSSARYCONFIG.fields_by_name["ignore_case"]._options = None +_TRANSLATETEXTREQUEST_LABELSENTRY._options = None +_TRANSLATETEXTREQUEST.fields_by_name["contents"]._options = None +_TRANSLATETEXTREQUEST.fields_by_name["mime_type"]._options = None +_TRANSLATETEXTREQUEST.fields_by_name["source_language_code"]._options = None +_TRANSLATETEXTREQUEST.fields_by_name["target_language_code"]._options = None +_TRANSLATETEXTREQUEST.fields_by_name["parent"]._options = None +_TRANSLATETEXTREQUEST.fields_by_name["model"]._options = None +_TRANSLATETEXTREQUEST.fields_by_name["glossary_config"]._options = None +_TRANSLATETEXTREQUEST.fields_by_name["labels"]._options = None +_DETECTLANGUAGEREQUEST_LABELSENTRY._options = None +_DETECTLANGUAGEREQUEST.fields_by_name["parent"]._options = None +_DETECTLANGUAGEREQUEST.fields_by_name["model"]._options = None +_DETECTLANGUAGEREQUEST.fields_by_name["mime_type"]._options = None +_DETECTLANGUAGEREQUEST.fields_by_name["labels"]._options = None +_GETSUPPORTEDLANGUAGESREQUEST.fields_by_name["parent"]._options = None +_GETSUPPORTEDLANGUAGESREQUEST.fields_by_name["display_language_code"]._options = None +_GETSUPPORTEDLANGUAGESREQUEST.fields_by_name["model"]._options = None +_INPUTCONFIG.fields_by_name["mime_type"]._options = None +_BATCHTRANSLATETEXTREQUEST_MODELSENTRY._options = None +_BATCHTRANSLATETEXTREQUEST_GLOSSARIESENTRY._options = None +_BATCHTRANSLATETEXTREQUEST_LABELSENTRY._options = None +_BATCHTRANSLATETEXTREQUEST.fields_by_name["parent"]._options = None +_BATCHTRANSLATETEXTREQUEST.fields_by_name["source_language_code"]._options = None +_BATCHTRANSLATETEXTREQUEST.fields_by_name["target_language_codes"]._options = None 
+_BATCHTRANSLATETEXTREQUEST.fields_by_name["models"]._options = None +_BATCHTRANSLATETEXTREQUEST.fields_by_name["input_configs"]._options = None +_BATCHTRANSLATETEXTREQUEST.fields_by_name["output_config"]._options = None +_BATCHTRANSLATETEXTREQUEST.fields_by_name["glossaries"]._options = None +_BATCHTRANSLATETEXTREQUEST.fields_by_name["labels"]._options = None +_GLOSSARY.fields_by_name["entry_count"]._options = None +_GLOSSARY.fields_by_name["submit_time"]._options = None +_GLOSSARY.fields_by_name["end_time"]._options = None +_GLOSSARY._options = None +_CREATEGLOSSARYREQUEST.fields_by_name["parent"]._options = None +_CREATEGLOSSARYREQUEST.fields_by_name["glossary"]._options = None +_GETGLOSSARYREQUEST.fields_by_name["name"]._options = None +_DELETEGLOSSARYREQUEST.fields_by_name["name"]._options = None +_LISTGLOSSARIESREQUEST.fields_by_name["parent"]._options = None +_LISTGLOSSARIESREQUEST.fields_by_name["page_size"]._options = None +_LISTGLOSSARIESREQUEST.fields_by_name["page_token"]._options = None +_LISTGLOSSARIESREQUEST.fields_by_name["filter"]._options = None + +_TRANSLATIONSERVICE = _descriptor.ServiceDescriptor( + name="TranslationService", + full_name="google.cloud.translation.v3.TranslationService", + file=DESCRIPTOR, + index=0, + serialized_options=_b( + "\312A\030translate.googleapis.com\322A`https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-translation" + ), + serialized_start=5516, + serialized_end=7578, + methods=[ + _descriptor.MethodDescriptor( + name="TranslateText", + full_name="google.cloud.translation.v3.TranslationService.TranslateText", + index=0, + containing_service=None, + input_type=_TRANSLATETEXTREQUEST, + output_type=_TRANSLATETEXTRESPONSE, + serialized_options=_b( + 
'\202\323\344\223\002b"1/v3/{parent=projects/*/locations/*}:translateText:\001*Z*"%/v3/{parent=projects/*}:translateText:\001*\332A$parent,target_language_code,contents\332AIparent,model,mime_type,source_language_code,target_language_code,contents' + ), + ), + _descriptor.MethodDescriptor( + name="DetectLanguage", + full_name="google.cloud.translation.v3.TranslationService.DetectLanguage", + index=1, + containing_service=None, + input_type=_DETECTLANGUAGEREQUEST, + output_type=_DETECTLANGUAGERESPONSE, + serialized_options=_b( + '\202\323\344\223\002d"2/v3/{parent=projects/*/locations/*}:detectLanguage:\001*Z+"&/v3/{parent=projects/*}:detectLanguage:\001*\332A\036parent,model,mime_type,content' + ), + ), + _descriptor.MethodDescriptor( + name="GetSupportedLanguages", + full_name="google.cloud.translation.v3.TranslationService.GetSupportedLanguages", + index=2, + containing_service=None, + input_type=_GETSUPPORTEDLANGUAGESREQUEST, + output_type=_SUPPORTEDLANGUAGES, + serialized_options=_b( + '\202\323\344\223\002f\0226/v3/{parent=projects/*/locations/*}/supportedLanguagesZ,\022*/v3/{parent=projects/*}/supportedLanguages\332A"parent,model,display_language_code' + ), + ), + _descriptor.MethodDescriptor( + name="BatchTranslateText", + full_name="google.cloud.translation.v3.TranslationService.BatchTranslateText", + index=3, + containing_service=None, + input_type=_BATCHTRANSLATETEXTREQUEST, + output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, + serialized_options=_b( + '\202\323\344\223\002;"6/v3/{parent=projects/*/locations/*}:batchTranslateText:\001*\312A0\n\026BatchTranslateResponse\022\026BatchTranslateMetadata' + ), + ), + _descriptor.MethodDescriptor( + name="CreateGlossary", + full_name="google.cloud.translation.v3.TranslationService.CreateGlossary", + index=4, + containing_service=None, + input_type=_CREATEGLOSSARYREQUEST, + output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, + serialized_options=_b( + 
'\202\323\344\223\002:"./v3/{parent=projects/*/locations/*}/glossaries:\010glossary\332A\017parent,glossary\312A"\n\010Glossary\022\026CreateGlossaryMetadata' + ), + ), + _descriptor.MethodDescriptor( + name="ListGlossaries", + full_name="google.cloud.translation.v3.TranslationService.ListGlossaries", + index=5, + containing_service=None, + input_type=_LISTGLOSSARIESREQUEST, + output_type=_LISTGLOSSARIESRESPONSE, + serialized_options=_b( + "\202\323\344\223\0020\022./v3/{parent=projects/*/locations/*}/glossaries\332A\006parent" + ), + ), + _descriptor.MethodDescriptor( + name="GetGlossary", + full_name="google.cloud.translation.v3.TranslationService.GetGlossary", + index=6, + containing_service=None, + input_type=_GETGLOSSARYREQUEST, + output_type=_GLOSSARY, + serialized_options=_b( + "\202\323\344\223\0020\022./v3/{name=projects/*/locations/*/glossaries/*}\332A\004name" + ), + ), + _descriptor.MethodDescriptor( + name="DeleteGlossary", + full_name="google.cloud.translation.v3.TranslationService.DeleteGlossary", + index=7, + containing_service=None, + input_type=_DELETEGLOSSARYREQUEST, + output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, + serialized_options=_b( + "\202\323\344\223\0020*./v3/{name=projects/*/locations/*/glossaries/*}\332A\004name\312A0\n\026DeleteGlossaryResponse\022\026DeleteGlossaryMetadata" + ), + ), + ], +) +_sym_db.RegisterServiceDescriptor(_TRANSLATIONSERVICE) + +DESCRIPTOR.services_by_name["TranslationService"] = _TRANSLATIONSERVICE + +# @@protoc_insertion_point(module_scope) diff --git a/translate/google/cloud/translate_v3/proto/translation_service_pb2_grpc.py b/translate/google/cloud/translate_v3/proto/translation_service_pb2_grpc.py new file mode 100644 index 000000000000..5f7f9813ffbe --- /dev/null +++ b/translate/google/cloud/translate_v3/proto/translation_service_pb2_grpc.py @@ -0,0 +1,186 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
+import grpc + +from google.cloud.translate_v3.proto import ( + translation_service_pb2 as google_dot_cloud_dot_translation__v3_dot_proto_dot_translation__service__pb2, +) +from google.longrunning import ( + operations_pb2 as google_dot_longrunning_dot_operations__pb2, +) + + +class TranslationServiceStub(object): + """Proto file for the Cloud Translation API (v3 GA). + + Provides natural language translation operations. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.TranslateText = channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/TranslateText", + request_serializer=google_dot_cloud_dot_translation__v3_dot_proto_dot_translation__service__pb2.TranslateTextRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_translation__v3_dot_proto_dot_translation__service__pb2.TranslateTextResponse.FromString, + ) + self.DetectLanguage = channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/DetectLanguage", + request_serializer=google_dot_cloud_dot_translation__v3_dot_proto_dot_translation__service__pb2.DetectLanguageRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_translation__v3_dot_proto_dot_translation__service__pb2.DetectLanguageResponse.FromString, + ) + self.GetSupportedLanguages = channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/GetSupportedLanguages", + request_serializer=google_dot_cloud_dot_translation__v3_dot_proto_dot_translation__service__pb2.GetSupportedLanguagesRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_translation__v3_dot_proto_dot_translation__service__pb2.SupportedLanguages.FromString, + ) + self.BatchTranslateText = channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/BatchTranslateText", + request_serializer=google_dot_cloud_dot_translation__v3_dot_proto_dot_translation__service__pb2.BatchTranslateTextRequest.SerializeToString, + 
response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, + ) + self.CreateGlossary = channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/CreateGlossary", + request_serializer=google_dot_cloud_dot_translation__v3_dot_proto_dot_translation__service__pb2.CreateGlossaryRequest.SerializeToString, + response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, + ) + self.ListGlossaries = channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/ListGlossaries", + request_serializer=google_dot_cloud_dot_translation__v3_dot_proto_dot_translation__service__pb2.ListGlossariesRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_translation__v3_dot_proto_dot_translation__service__pb2.ListGlossariesResponse.FromString, + ) + self.GetGlossary = channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/GetGlossary", + request_serializer=google_dot_cloud_dot_translation__v3_dot_proto_dot_translation__service__pb2.GetGlossaryRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_translation__v3_dot_proto_dot_translation__service__pb2.Glossary.FromString, + ) + self.DeleteGlossary = channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/DeleteGlossary", + request_serializer=google_dot_cloud_dot_translation__v3_dot_proto_dot_translation__service__pb2.DeleteGlossaryRequest.SerializeToString, + response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, + ) + + +class TranslationServiceServicer(object): + """Proto file for the Cloud Translation API (v3 GA). + + Provides natural language translation operations. + """ + + def TranslateText(self, request, context): + """Translates input text and returns translated text. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def DetectLanguage(self, request, context): + """Detects the language of text within a request. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def GetSupportedLanguages(self, request, context): + """Returns a list of supported languages for translation. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def BatchTranslateText(self, request, context): + """Translates a large volume of text in asynchronous batch mode. + This function provides real-time output as the inputs are being processed. + If caller cancels a request, the partial results (for an input file, it's + all or nothing) may still be available on the specified output location. + + This call returns immediately and you can + use google.longrunning.Operation.name to poll the status of the call. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def CreateGlossary(self, request, context): + """Creates a glossary and returns the long-running operation. Returns + NOT_FOUND, if the project doesn't exist. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def ListGlossaries(self, request, context): + """Lists glossaries in a project. Returns NOT_FOUND, if the project doesn't + exist. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def GetGlossary(self, request, context): + """Gets a glossary. Returns NOT_FOUND, if the glossary doesn't + exist. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def DeleteGlossary(self, request, context): + """Deletes a glossary, or cancels glossary construction + if the glossary isn't created yet. + Returns NOT_FOUND, if the glossary doesn't exist. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + +def add_TranslationServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + "TranslateText": grpc.unary_unary_rpc_method_handler( + servicer.TranslateText, + request_deserializer=google_dot_cloud_dot_translation__v3_dot_proto_dot_translation__service__pb2.TranslateTextRequest.FromString, + response_serializer=google_dot_cloud_dot_translation__v3_dot_proto_dot_translation__service__pb2.TranslateTextResponse.SerializeToString, + ), + "DetectLanguage": grpc.unary_unary_rpc_method_handler( + servicer.DetectLanguage, + request_deserializer=google_dot_cloud_dot_translation__v3_dot_proto_dot_translation__service__pb2.DetectLanguageRequest.FromString, + response_serializer=google_dot_cloud_dot_translation__v3_dot_proto_dot_translation__service__pb2.DetectLanguageResponse.SerializeToString, + ), + "GetSupportedLanguages": grpc.unary_unary_rpc_method_handler( + servicer.GetSupportedLanguages, + request_deserializer=google_dot_cloud_dot_translation__v3_dot_proto_dot_translation__service__pb2.GetSupportedLanguagesRequest.FromString, + response_serializer=google_dot_cloud_dot_translation__v3_dot_proto_dot_translation__service__pb2.SupportedLanguages.SerializeToString, + ), + 
"BatchTranslateText": grpc.unary_unary_rpc_method_handler( + servicer.BatchTranslateText, + request_deserializer=google_dot_cloud_dot_translation__v3_dot_proto_dot_translation__service__pb2.BatchTranslateTextRequest.FromString, + response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ), + "CreateGlossary": grpc.unary_unary_rpc_method_handler( + servicer.CreateGlossary, + request_deserializer=google_dot_cloud_dot_translation__v3_dot_proto_dot_translation__service__pb2.CreateGlossaryRequest.FromString, + response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ), + "ListGlossaries": grpc.unary_unary_rpc_method_handler( + servicer.ListGlossaries, + request_deserializer=google_dot_cloud_dot_translation__v3_dot_proto_dot_translation__service__pb2.ListGlossariesRequest.FromString, + response_serializer=google_dot_cloud_dot_translation__v3_dot_proto_dot_translation__service__pb2.ListGlossariesResponse.SerializeToString, + ), + "GetGlossary": grpc.unary_unary_rpc_method_handler( + servicer.GetGlossary, + request_deserializer=google_dot_cloud_dot_translation__v3_dot_proto_dot_translation__service__pb2.GetGlossaryRequest.FromString, + response_serializer=google_dot_cloud_dot_translation__v3_dot_proto_dot_translation__service__pb2.Glossary.SerializeToString, + ), + "DeleteGlossary": grpc.unary_unary_rpc_method_handler( + servicer.DeleteGlossary, + request_deserializer=google_dot_cloud_dot_translation__v3_dot_proto_dot_translation__service__pb2.DeleteGlossaryRequest.FromString, + response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + "google.cloud.translation.v3.TranslationService", rpc_method_handlers + ) + server.add_generic_rpc_handlers((generic_handler,)) diff --git a/videointelligence/google/cloud/videointelligence_v1beta1/types.py b/translate/google/cloud/translate_v3/types.py similarity 
index 87% rename from videointelligence/google/cloud/videointelligence_v1beta1/types.py rename to translate/google/cloud/translate_v3/types.py index f4b0fd1b0bde..14c494adc850 100644 --- a/videointelligence/google/cloud/videointelligence_v1beta1/types.py +++ b/translate/google/cloud/translate_v3/types.py @@ -20,7 +20,7 @@ from google.api_core.protobuf_helpers import get_messages -from google.cloud.videointelligence_v1beta1.proto import video_intelligence_pb2 +from google.cloud.translate_v3.proto import translation_service_pb2 from google.longrunning import operations_pb2 from google.protobuf import any_pb2 from google.protobuf import timestamp_pb2 @@ -29,7 +29,7 @@ _shared_modules = [operations_pb2, any_pb2, timestamp_pb2, status_pb2] -_local_modules = [video_intelligence_pb2] +_local_modules = [translation_service_pb2] names = [] @@ -39,7 +39,7 @@ names.append(name) for module in _local_modules: for name, message in get_messages(module).items(): - message.__module__ = "google.cloud.videointelligence_v1beta1.types" + message.__module__ = "google.cloud.translate_v3.types" setattr(sys.modules[__name__], name, message) names.append(name) diff --git a/translate/google/cloud/translate_v3beta1/gapic/translation_service_client.py b/translate/google/cloud/translate_v3beta1/gapic/translation_service_client.py index b0feb083369d..c62bacfd859b 100644 --- a/translate/google/cloud/translate_v3beta1/gapic/translation_service_client.py +++ b/translate/google/cloud/translate_v3beta1/gapic/translation_service_client.py @@ -214,11 +214,12 @@ def translate_text( self, contents, target_language_code, + parent, mime_type=None, source_language_code=None, - parent=None, model=None, glossary_config=None, + labels=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, @@ -236,8 +237,9 @@ def translate_text( >>> >>> # TODO: Initialize `target_language_code`: >>> target_language_code = '' + >>> parent = 
client.location_path('[PROJECT]', '[LOCATION]') >>> - >>> response = client.translate_text(contents, target_language_code) + >>> response = client.translate_text(contents, target_language_code, parent) Args: contents (list[str]): Required. The content of the input in string format. @@ -245,6 +247,20 @@ def translate_text( Use BatchTranslateText for larger text. target_language_code (str): Required. The BCP-47 language code to use for translation of the input text, set to one of the language codes listed in Language Support. + parent (str): Required. Project or location to make a call. Must refer to a caller's + project. + + Format: ``projects/{project-id}`` or + ``projects/{project-id}/locations/{location-id}``. + + For global calls, use ``projects/{project-id}/locations/global`` or + ``projects/{project-id}``. + + Non-global location is required for requests using AutoML models or + custom glossaries. + + Models and glossaries must be within the same region (have same + location-id), otherwise an INVALID\_ARGUMENT (400) error is returned. mime_type (str): Optional. The format of the source text, for example, "text/html", "text/plain". If left blank, the MIME type defaults to "text/html". source_language_code (str): Optional. The BCP-47 language code of the input text if @@ -252,14 +268,6 @@ def translate_text( listed in Language Support. If the source language isn't specified, the API attempts to identify the source language automatically and returns the source language within the response. - parent (str): Required. Location to make a regional or global call. - - Format: ``projects/{project-id}/locations/{location-id}``. - - For global calls, use ``projects/{project-id}/locations/global``. - - Models and glossaries must be within the same region (have same - location-id), otherwise an INVALID\_ARGUMENT (400) error is returned. model (str): Optional. The ``model`` type requested for this translation. 
The format depends on model type: @@ -282,6 +290,14 @@ def translate_text( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.translate_v3beta1.types.TranslateTextGlossaryConfig` + labels (dict[str -> str]): Optional. The labels with user-defined metadata for the request. + + Label keys and values can be no longer than 63 characters + (Unicode codepoints), can only contain lowercase letters, numeric + characters, underscores and dashes. International characters are allowed. + Label values are optional. Label keys must start with a letter. + + See https://cloud.google.com/translate/docs/labels for more information. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. @@ -315,11 +331,12 @@ def translate_text( request = translation_service_pb2.TranslateTextRequest( contents=contents, target_language_code=target_language_code, + parent=parent, mime_type=mime_type, source_language_code=source_language_code, - parent=parent, model=model, glossary_config=glossary_config, + labels=labels, ) if metadata is None: metadata = [] @@ -340,10 +357,11 @@ def translate_text( def detect_language( self, - parent=None, + parent, model=None, content=None, mime_type=None, + labels=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, @@ -356,14 +374,19 @@ def detect_language( >>> >>> client = translate_v3beta1.TranslationServiceClient() >>> - >>> response = client.detect_language() + >>> parent = client.location_path('[PROJECT]', '[LOCATION]') + >>> + >>> response = client.detect_language(parent) Args: - parent (str): Required. Location to make a regional or global call. + parent (str): Required. Project or location to make a call. Must refer to a caller's + project. - Format: ``projects/{project-id}/locations/{location-id}``. 
+ Format: ``projects/{project-id}/locations/{location-id}`` or + ``projects/{project-id}``. - For global calls, use ``projects/{project-id}/locations/global``. + For global calls, use ``projects/{project-id}/locations/global`` or + ``projects/{project-id}``. Only models within the same region (has same location-id) can be used. Otherwise an INVALID\_ARGUMENT (400) error is returned. @@ -379,6 +402,14 @@ def detect_language( content (str): The content of the input stored as a string. mime_type (str): Optional. The format of the source text, for example, "text/html", "text/plain". If left blank, the MIME type defaults to "text/html". + labels (dict[str -> str]): Optional. The labels with user-defined metadata for the request. + + Label keys and values can be no longer than 63 characters + (Unicode codepoints), can only contain lowercase letters, numeric + characters, underscores and dashes. International characters are allowed. + Label values are optional. Label keys must start with a letter. + + See https://cloud.google.com/translate/docs/labels for more information. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. 
@@ -414,7 +445,11 @@ def detect_language( google.api_core.protobuf_helpers.check_oneof(content=content) request = translation_service_pb2.DetectLanguageRequest( - parent=parent, model=model, content=content, mime_type=mime_type + parent=parent, + model=model, + content=content, + mime_type=mime_type, + labels=labels, ) if metadata is None: metadata = [] @@ -435,7 +470,7 @@ def detect_language( def get_supported_languages( self, - parent=None, + parent, display_language_code=None, model=None, retry=google.api_core.gapic_v1.method.DEFAULT, @@ -450,14 +485,21 @@ def get_supported_languages( >>> >>> client = translate_v3beta1.TranslationServiceClient() >>> - >>> response = client.get_supported_languages() + >>> parent = client.location_path('[PROJECT]', '[LOCATION]') + >>> + >>> response = client.get_supported_languages(parent) Args: - parent (str): Required. Location to make a regional or global call. + parent (str): Required. Project or location to make a call. Must refer to a caller's + project. - Format: ``projects/{project-id}/locations/{location-id}``. + Format: ``projects/{project-id}`` or + ``projects/{project-id}/locations/{location-id}``. - For global calls, use ``projects/{project-id}/locations/global``. + For global calls, use ``projects/{project-id}/locations/global`` or + ``projects/{project-id}``. + + Non-global location is required for AutoML models. Only models within the same region (have same location-id) can be used, otherwise an INVALID\_ARGUMENT (400) error is returned. 
@@ -529,13 +571,14 @@ def get_supported_languages( def batch_translate_text( self, + parent, source_language_code, target_language_codes, input_configs, output_config, - parent=None, models=None, glossaries=None, + labels=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, @@ -554,6 +597,8 @@ def batch_translate_text( >>> >>> client = translate_v3beta1.TranslationServiceClient() >>> + >>> parent = client.location_path('[PROJECT]', '[LOCATION]') + >>> >>> # TODO: Initialize `source_language_code`: >>> source_language_code = '' >>> @@ -566,7 +611,7 @@ def batch_translate_text( >>> # TODO: Initialize `output_config`: >>> output_config = {} >>> - >>> response = client.batch_translate_text(source_language_code, target_language_codes, input_configs, output_config) + >>> response = client.batch_translate_text(parent, source_language_code, target_language_codes, input_configs, output_config) >>> >>> def callback(operation_future): ... # Handle result. @@ -578,6 +623,15 @@ def batch_translate_text( >>> metadata = response.metadata() Args: + parent (str): Required. Location to make a call. Must refer to a caller's project. + + Format: ``projects/{project-id}/locations/{location-id}``. + + The ``global`` location is not supported for batch translation. + + Only AutoML Translation models or glossaries within the same region + (have the same location-id) can be used, otherwise an INVALID\_ARGUMENT + (400) error is returned. source_language_code (str): Required. Source language code. target_language_codes (list[str]): Required. Specify up to 10 language codes here. input_configs (list[Union[dict, ~google.cloud.translate_v3beta1.types.InputConfig]]): Required. Input configurations. @@ -593,15 +647,6 @@ def batch_translate_text( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.translate_v3beta1.types.OutputConfig` - parent (str): Required. 
Location to make a regional call. - - Format: ``projects/{project-id}/locations/{location-id}``. - - The ``global`` location is not supported for batch translation. - - Only AutoML Translation models or glossaries within the same region - (have the same location-id) can be used, otherwise an INVALID\_ARGUMENT - (400) error is returned. models (dict[str -> str]): Optional. The models to use for translation. Map's key is target language code. Map's value is model name. Value can be a built-in general model, or an AutoML Translation model. @@ -622,6 +667,14 @@ def batch_translate_text( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.translate_v3beta1.types.TranslateTextGlossaryConfig` + labels (dict[str -> str]): Optional. The labels with user-defined metadata for the request. + + Label keys and values can be no longer than 63 characters + (Unicode codepoints), can only contain lowercase letters, numeric + characters, underscores and dashes. International characters are allowed. + Label values are optional. Label keys must start with a letter. + + See https://cloud.google.com/translate/docs/labels for more information. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. 
@@ -653,13 +706,14 @@ def batch_translate_text( ) request = translation_service_pb2.BatchTranslateTextRequest( + parent=parent, source_language_code=source_language_code, target_language_codes=target_language_codes, input_configs=input_configs, output_config=output_config, - parent=parent, models=models, glossaries=glossaries, + labels=labels, ) if metadata is None: metadata = [] @@ -781,7 +835,7 @@ def create_glossary( def list_glossaries( self, - parent=None, + parent, page_size=None, filter_=None, retry=google.api_core.gapic_v1.method.DEFAULT, @@ -797,8 +851,10 @@ def list_glossaries( >>> >>> client = translate_v3beta1.TranslationServiceClient() >>> + >>> parent = client.location_path('[PROJECT]', '[LOCATION]') + >>> >>> # Iterate over all results - >>> for element in client.list_glossaries(): + >>> for element in client.list_glossaries(parent): ... # process element ... pass >>> @@ -806,7 +862,7 @@ def list_glossaries( >>> # Alternatively: >>> >>> # Iterate over results one page at a time - >>> for page in client.list_glossaries().pages: + >>> for page in client.list_glossaries(parent).pages: ... for element in page: ... # process element ... 
pass diff --git a/translate/google/cloud/translate_v3beta1/gapic/translation_service_client_config.py b/translate/google/cloud/translate_v3beta1/gapic/translation_service_client_config.py index 21b7872180e8..af45b10d95d6 100644 --- a/translate/google/cloud/translate_v3beta1/gapic/translation_service_client_config.py +++ b/translate/google/cloud/translate_v3beta1/gapic/translation_service_client_config.py @@ -54,7 +54,7 @@ }, "DeleteGlossary": { "timeout_millis": 60000, - "retry_codes_name": "idempotent", + "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, }, diff --git a/translate/google/cloud/translate_v3beta1/proto/translation_service.proto b/translate/google/cloud/translate_v3beta1/proto/translation_service.proto index fa20f01693f2..e62a50883822 100644 --- a/translate/google/cloud/translate_v3beta1/proto/translation_service.proto +++ b/translate/google/cloud/translate_v3beta1/proto/translation_service.proto @@ -18,10 +18,11 @@ syntax = "proto3"; package google.cloud.translation.v3beta1; import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; import "google/api/resource.proto"; import "google/longrunning/operations.proto"; import "google/protobuf/timestamp.proto"; -import "google/api/client.proto"; option cc_enable_arenas = true; option csharp_namespace = "Google.Cloud.Translate.V3Beta1"; @@ -36,7 +37,10 @@ option ruby_package = "Google::Cloud::Translate::V3beta1"; // Provides natural language translation operations. service TranslationService { - option (google.api.default_host) = "translation.googleapis.com"; + option (google.api.default_host) = "translate.googleapis.com"; + option (google.api.oauth_scopes) = + "https://www.googleapis.com/auth/cloud-platform," + "https://www.googleapis.com/auth/cloud-translation"; // Translates input text and returns translated text. 
rpc TranslateText(TranslateTextRequest) returns (TranslateTextResponse) { @@ -60,6 +64,7 @@ service TranslationService { body: "*" } }; + option (google.api.method_signature) = "parent,model,mime_type"; } // Returns a list of supported languages for translation. @@ -70,6 +75,7 @@ service TranslationService { get: "/v3beta1/{parent=projects/*}/supportedLanguages" } }; + option (google.api.method_signature) = "parent,display_language_code,model"; } // Translates a large volume of text in asynchronous batch mode. @@ -84,6 +90,10 @@ service TranslationService { post: "/v3beta1/{parent=projects/*/locations/*}:batchTranslateText" body: "*" }; + option (google.longrunning.operation_info) = { + response_type: "BatchTranslateResponse" + metadata_type: "BatchTranslateMetadata" + }; } // Creates a glossary and returns the long-running operation. Returns @@ -93,6 +103,11 @@ service TranslationService { post: "/v3beta1/{parent=projects/*/locations/*}/glossaries" body: "glossary" }; + option (google.api.method_signature) = "parent,glossary"; + option (google.longrunning.operation_info) = { + response_type: "Glossary" + metadata_type: "CreateGlossaryMetadata" + }; } // Lists glossaries in a project. Returns NOT_FOUND, if the project doesn't @@ -101,6 +116,8 @@ service TranslationService { option (google.api.http) = { get: "/v3beta1/{parent=projects/*/locations/*}/glossaries" }; + option (google.api.method_signature) = "parent"; + option (google.api.method_signature) = "parent,filter"; } // Gets a glossary. 
Returns NOT_FOUND, if the glossary doesn't @@ -109,6 +126,7 @@ service TranslationService { option (google.api.http) = { get: "/v3beta1/{name=projects/*/locations/*/glossaries/*}" }; + option (google.api.method_signature) = "name"; } // Deletes a glossary, or cancels glossary construction @@ -118,6 +136,11 @@ service TranslationService { option (google.api.http) = { delete: "/v3beta1/{name=projects/*/locations/*/glossaries/*}" }; + option (google.api.method_signature) = "name"; + option (google.longrunning.operation_info) = { + response_type: "DeleteGlossaryResponse" + metadata_type: "DeleteGlossaryMetadata" + }; } } @@ -126,11 +149,11 @@ service TranslationService { message TranslateTextGlossaryConfig { // Required. Specifies the glossary used for this translation. Use // this format: projects/*/locations/*/glossaries/* - string glossary = 1; + string glossary = 1 [(google.api.field_behavior) = REQUIRED]; // Optional. Indicates match is case-insensitive. // Default value is false if missing. - bool ignore_case = 2; + bool ignore_case = 2 [(google.api.field_behavior) = OPTIONAL]; } // The request message for synchronous translation. @@ -138,32 +161,43 @@ message TranslateTextRequest { // Required. The content of the input in string format. // We recommend the total content be less than 30k codepoints. // Use BatchTranslateText for larger text. - repeated string contents = 1; + repeated string contents = 1 [(google.api.field_behavior) = REQUIRED]; // Optional. The format of the source text, for example, "text/html", // "text/plain". If left blank, the MIME type defaults to "text/html". - string mime_type = 3; + string mime_type = 3 [(google.api.field_behavior) = OPTIONAL]; // Optional. The BCP-47 language code of the input text if // known, for example, "en-US" or "sr-Latn". Supported language codes are // listed in Language Support. 
If the source language isn't specified, the API // attempts to identify the source language automatically and returns the // source language within the response. - string source_language_code = 4; + string source_language_code = 4 [(google.api.field_behavior) = OPTIONAL]; // Required. The BCP-47 language code to use for translation of the input // text, set to one of the language codes listed in Language Support. - string target_language_code = 5; + string target_language_code = 5 [(google.api.field_behavior) = REQUIRED]; - // Required. Location to make a regional or global call. + // Required. Project or location to make a call. Must refer to a caller's + // project. // - // Format: `projects/{project-id}/locations/{location-id}`. + // Format: `projects/{project-id}` or + // `projects/{project-id}/locations/{location-id}`. // - // For global calls, use `projects/{project-id}/locations/global`. + // For global calls, use `projects/{project-id}/locations/global` or + // `projects/{project-id}`. + // + // Non-global location is required for requests using AutoML models or + // custom glossaries. // // Models and glossaries must be within the same region (have same // location-id), otherwise an INVALID_ARGUMENT (400) error is returned. - string parent = 8; + string parent = 8 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; // Optional. The `model` type requested for this translation. // @@ -182,12 +216,22 @@ message TranslateTextRequest { // `projects/{project-id}/locations/global/models/general/nmt`. // // If missing, the system decides which google base model to use. - string model = 6; + string model = 6 [(google.api.field_behavior) = OPTIONAL]; // Optional. Glossary to be applied. The glossary must be // within the same region (have the same location-id) as the model, otherwise // an INVALID_ARGUMENT (400) error is returned. 
- TranslateTextGlossaryConfig glossary_config = 7; + TranslateTextGlossaryConfig glossary_config = 7 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The labels with user-defined metadata for the request. + // + // Label keys and values can be no longer than 63 characters + // (Unicode codepoints), can only contain lowercase letters, numeric + // characters, underscores and dashes. International characters are allowed. + // Label values are optional. Label keys must start with a letter. + // + // See https://cloud.google.com/translate/docs/labels for more information. + map labels = 10 [(google.api.field_behavior) = OPTIONAL]; } message TranslateTextResponse { @@ -225,15 +269,23 @@ message Translation { // The request message for language detection. message DetectLanguageRequest { - // Required. Location to make a regional or global call. + // Required. Project or location to make a call. Must refer to a caller's + // project. // - // Format: `projects/{project-id}/locations/{location-id}`. + // Format: `projects/{project-id}/locations/{location-id}` or + // `projects/{project-id}`. // - // For global calls, use `projects/{project-id}/locations/global`. + // For global calls, use `projects/{project-id}/locations/global` or + // `projects/{project-id}`. // // Only models within the same region (has same location-id) can be used. // Otherwise an INVALID_ARGUMENT (400) error is returned. - string parent = 5; + string parent = 5 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; // Optional. The language detection model to be used. // @@ -244,7 +296,7 @@ message DetectLanguageRequest { // `projects/{project-id}/locations/{location-id}/models/language-detection/default`. // // If not specified, the default model is used. - string model = 4; + string model = 4 [(google.api.field_behavior) = OPTIONAL]; // Required. The source of the document from which to detect the language. 
oneof source { @@ -254,7 +306,17 @@ message DetectLanguageRequest { // Optional. The format of the source text, for example, "text/html", // "text/plain". If left blank, the MIME type defaults to "text/html". - string mime_type = 3; + string mime_type = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The labels with user-defined metadata for the request. + // + // Label keys and values can be no longer than 63 characters + // (Unicode codepoints), can only contain lowercase letters, numeric + // characters, underscores and dashes. International characters are allowed. + // Label values are optional. Label keys must start with a letter. + // + // See https://cloud.google.com/translate/docs/labels for more information. + map labels = 6; } // The response message for language detection. @@ -276,20 +338,30 @@ message DetectLanguageResponse { // The request message for discovering supported languages. message GetSupportedLanguagesRequest { - // Required. Location to make a regional or global call. + // Required. Project or location to make a call. Must refer to a caller's + // project. // - // Format: `projects/{project-id}/locations/{location-id}`. + // Format: `projects/{project-id}` or + // `projects/{project-id}/locations/{location-id}`. // - // For global calls, use `projects/{project-id}/locations/global`. + // For global calls, use `projects/{project-id}/locations/global` or + // `projects/{project-id}`. + // + // Non-global location is required for AutoML models. // // Only models within the same region (have same location-id) can be used, // otherwise an INVALID_ARGUMENT (400) error is returned. - string parent = 3; + string parent = 3 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; // Optional. The language to use to return localized, human readable names // of supported languages. If missing, then display names are not returned // in a response. 
- string display_language_code = 1; + string display_language_code = 1 [(google.api.field_behavior) = OPTIONAL]; // Optional. Get supported languages of this model. // @@ -305,7 +377,7 @@ message GetSupportedLanguagesRequest { // // Returns languages supported by the specified model. // If missing, we get supported languages of Google general base (PBMT) model. - string model = 2; + string model = 2 [(google.api.field_behavior) = OPTIONAL]; } // The response message for discovering supported languages. @@ -338,7 +410,7 @@ message SupportedLanguage { // The Google Cloud Storage location for the input content. message GcsSource { // Required. Source data URI. For example, `gs://my_bucket/my_object`. - string input_uri = 1; + string input_uri = 1 [(google.api.field_behavior) = REQUIRED]; } // Input configuration for BatchTranslateText request. @@ -347,7 +419,7 @@ message InputConfig { // For `.tsv`, "text/html" is used if mime_type is missing. // For `.html`, this field must be "text/html" or empty. // For `.txt`, this field must be "text/plain" or empty. - string mime_type = 1; + string mime_type = 1 [(google.api.field_behavior) = OPTIONAL]; // Required. Specify the input. oneof source { @@ -373,12 +445,12 @@ message InputConfig { } } -// The Google Cloud Storage location for the output content +// The Google Cloud Storage location for the output content. message GcsDestination { // Required. There must be no files under 'output_uri_prefix'. - // 'output_uri_prefix' must end with "/", otherwise an INVALID_ARGUMENT (400) - // error is returned.. - string output_uri_prefix = 1; + // 'output_uri_prefix' must end with "/" and start with "gs://", otherwise an + // INVALID_ARGUMENT (400) error is returned. + string output_uri_prefix = 1 [(google.api.field_behavior) = REQUIRED]; } // Output configuration for BatchTranslateText request. @@ -457,7 +529,7 @@ message OutputConfig { // The batch translation request. message BatchTranslateTextRequest { - // Required. 
Location to make a regional call. + // Required. Location to make a call. Must refer to a caller's project. // // Format: `projects/{project-id}/locations/{location-id}`. // @@ -466,13 +538,18 @@ message BatchTranslateTextRequest { // Only AutoML Translation models or glossaries within the same region (have // the same location-id) can be used, otherwise an INVALID_ARGUMENT (400) // error is returned. - string parent = 1; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; // Required. Source language code. - string source_language_code = 2; + string source_language_code = 2 [(google.api.field_behavior) = REQUIRED]; // Required. Specify up to 10 language codes here. - repeated string target_language_codes = 3; + repeated string target_language_codes = 3 [(google.api.field_behavior) = REQUIRED]; // Optional. The models to use for translation. Map's key is target language // code. Map's value is model name. Value can be a built-in general model, @@ -490,22 +567,32 @@ message BatchTranslateTextRequest { // // If the map is empty or a specific model is // not requested for a language pair, then default google model (nmt) is used. - map models = 4; + map models = 4 [(google.api.field_behavior) = OPTIONAL]; // Required. Input configurations. // The total number of files matched should be <= 1000. // The total content size should be <= 100M Unicode codepoints. // The files must use UTF-8 encoding. - repeated InputConfig input_configs = 5; + repeated InputConfig input_configs = 5 [(google.api.field_behavior) = REQUIRED]; // Required. Output configuration. // If 2 input configs match to the same file (that is, same input path), // we don't generate output for duplicate inputs. - OutputConfig output_config = 6; + OutputConfig output_config = 6 [(google.api.field_behavior) = REQUIRED]; // Optional. Glossaries to be applied for translation. 
// It's keyed by target language code. - map glossaries = 7; + map glossaries = 7 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The labels with user-defined metadata for the request. + // + // Label keys and values can be no longer than 63 characters + // (Unicode codepoints), can only contain lowercase letters, numeric + // characters, underscores and dashes. International characters are allowed. + // Label values are optional. Label keys must start with a letter. + // + // See https://cloud.google.com/translate/docs/labels for more information. + map labels = 9 [(google.api.field_behavior) = OPTIONAL]; } // State metadata for the batch translation operation. @@ -606,6 +693,11 @@ message GlossaryInputConfig { // Represents a glossary built from user provided data. message Glossary { + option (google.api.resource) = { + type: "translate.googleapis.com/Glossary" + pattern: "projects/{project}/locations/{location}/glossaries/{glossary}" + }; + // Used with unidirectional glossaries. message LanguageCodePair { // Required. The BCP-47 language code of the input text, for example, @@ -627,7 +719,7 @@ message Glossary { // Required. The resource name of the glossary. Glossary names have the form // `projects/{project-id}/locations/{location-id}/glossaries/{glossary-id}`. - string name = 1; + string name = 1 [(google.api.field_behavior) = REQUIRED]; // Languages supported by the glossary. oneof languages { @@ -643,55 +735,75 @@ message Glossary { GlossaryInputConfig input_config = 5; // Output only. The number of entries defined in the glossary. - int32 entry_count = 6; + int32 entry_count = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. When CreateGlossary was called. - google.protobuf.Timestamp submit_time = 7; + google.protobuf.Timestamp submit_time = 7 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. When the glossary creation was finished. 
- google.protobuf.Timestamp end_time = 8; + google.protobuf.Timestamp end_time = 8 [(google.api.field_behavior) = OUTPUT_ONLY]; } // Request message for CreateGlossary. message CreateGlossaryRequest { // Required. The project name. - string parent = 1; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; // Required. The glossary to create. - Glossary glossary = 2; + Glossary glossary = 2 [(google.api.field_behavior) = REQUIRED]; } // Request message for GetGlossary. message GetGlossaryRequest { // Required. The name of the glossary to retrieve. - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "translate.googleapis.com/Glossary" + } + ]; } // Request message for DeleteGlossary. message DeleteGlossaryRequest { // Required. The name of the glossary to delete. - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "translate.googleapis.com/Glossary" + } + ]; } // Request message for ListGlossaries. message ListGlossariesRequest { // Required. The name of the project from which to list all of the glossaries. - string parent = 1; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; // Optional. Requested page size. The server may return fewer glossaries than // requested. If unspecified, the server picks an appropriate default. - int32 page_size = 2; + int32 page_size = 2 [(google.api.field_behavior) = OPTIONAL]; // Optional. A token identifying a page of results the server should return. // Typically, this is the value of [ListGlossariesResponse.next_page_token] // returned from the previous call to `ListGlossaries` method. // The first page is returned if `page_token`is empty or missing. 
- string page_token = 3; + string page_token = 3 [(google.api.field_behavior) = OPTIONAL]; // Optional. Filter specifying constraints of a list operation. // Filtering is not supported yet, and the parameter currently has no effect. // If missing, no filtering is performed. - string filter = 4; + string filter = 4 [(google.api.field_behavior) = OPTIONAL]; } // Response message for ListGlossaries. diff --git a/translate/google/cloud/translate_v3beta1/proto/translation_service_pb2.py b/translate/google/cloud/translate_v3beta1/proto/translation_service_pb2.py index 60dfc368b645..8a423d69322f 100644 --- a/translate/google/cloud/translate_v3beta1/proto/translation_service_pb2.py +++ b/translate/google/cloud/translate_v3beta1/proto/translation_service_pb2.py @@ -16,12 +16,13 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.longrunning import ( operations_pb2 as google_dot_longrunning_dot_operations__pb2, ) from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from google.api import client_pb2 as google_dot_api_dot_client__pb2 DESCRIPTOR = _descriptor.FileDescriptor( @@ -32,14 +33,15 @@ '\n"com.google.cloud.translate.v3beta1B\027TranslationServiceProtoP\001ZGgoogle.golang.org/genproto/googleapis/cloud/translate/v3beta1;translate\370\001\001\252\002\036Google.Cloud.Translate.V3Beta1\312\002\036Google\\Cloud\\Translate\\V3beta1\352\002!Google::Cloud::Translate::V3beta1' ), serialized_pb=_b( - '\n@google/cloud/translation_v3beta1/proto/translation_service.proto\x12 
google.cloud.translation.v3beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x19google/api/resource.proto\x1a#google/longrunning/operations.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/api/client.proto"D\n\x1bTranslateTextGlossaryConfig\x12\x10\n\x08glossary\x18\x01 \x01(\t\x12\x13\n\x0bignore_case\x18\x02 \x01(\x08"\xee\x01\n\x14TranslateTextRequest\x12\x10\n\x08\x63ontents\x18\x01 \x03(\t\x12\x11\n\tmime_type\x18\x03 \x01(\t\x12\x1c\n\x14source_language_code\x18\x04 \x01(\t\x12\x1c\n\x14target_language_code\x18\x05 \x01(\t\x12\x0e\n\x06parent\x18\x08 \x01(\t\x12\r\n\x05model\x18\x06 \x01(\t\x12V\n\x0fglossary_config\x18\x07 \x01(\x0b\x32=.google.cloud.translation.v3beta1.TranslateTextGlossaryConfig"\xaa\x01\n\x15TranslateTextResponse\x12\x43\n\x0ctranslations\x18\x01 \x03(\x0b\x32-.google.cloud.translation.v3beta1.Translation\x12L\n\x15glossary_translations\x18\x03 \x03(\x0b\x32-.google.cloud.translation.v3beta1.Translation"\xad\x01\n\x0bTranslation\x12\x17\n\x0ftranslated_text\x18\x01 \x01(\t\x12\r\n\x05model\x18\x02 \x01(\t\x12\x1e\n\x16\x64\x65tected_language_code\x18\x04 \x01(\t\x12V\n\x0fglossary_config\x18\x03 \x01(\x0b\x32=.google.cloud.translation.v3beta1.TranslateTextGlossaryConfig"f\n\x15\x44\x65tectLanguageRequest\x12\x0e\n\x06parent\x18\x05 \x01(\t\x12\r\n\x05model\x18\x04 \x01(\t\x12\x11\n\x07\x63ontent\x18\x01 \x01(\tH\x00\x12\x11\n\tmime_type\x18\x03 \x01(\tB\x08\n\x06source"=\n\x10\x44\x65tectedLanguage\x12\x15\n\rlanguage_code\x18\x01 \x01(\t\x12\x12\n\nconfidence\x18\x02 \x01(\x02"_\n\x16\x44\x65tectLanguageResponse\x12\x45\n\tlanguages\x18\x01 \x03(\x0b\x32\x32.google.cloud.translation.v3beta1.DetectedLanguage"\\\n\x1cGetSupportedLanguagesRequest\x12\x0e\n\x06parent\x18\x03 \x01(\t\x12\x1d\n\x15\x64isplay_language_code\x18\x01 \x01(\t\x12\r\n\x05model\x18\x02 \x01(\t"\\\n\x12SupportedLanguages\x12\x46\n\tlanguages\x18\x01 
\x03(\x0b\x32\x33.google.cloud.translation.v3beta1.SupportedLanguage"p\n\x11SupportedLanguage\x12\x15\n\rlanguage_code\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12\x16\n\x0esupport_source\x18\x03 \x01(\x08\x12\x16\n\x0esupport_target\x18\x04 \x01(\x08"\x1e\n\tGcsSource\x12\x11\n\tinput_uri\x18\x01 \x01(\t"m\n\x0bInputConfig\x12\x11\n\tmime_type\x18\x01 \x01(\t\x12\x41\n\ngcs_source\x18\x02 \x01(\x0b\x32+.google.cloud.translation.v3beta1.GcsSourceH\x00\x42\x08\n\x06source"+\n\x0eGcsDestination\x12\x19\n\x11output_uri_prefix\x18\x01 \x01(\t"j\n\x0cOutputConfig\x12K\n\x0fgcs_destination\x18\x01 \x01(\x0b\x32\x30.google.cloud.translation.v3beta1.GcsDestinationH\x00\x42\r\n\x0b\x64\x65stination"\xd0\x04\n\x19\x42\x61tchTranslateTextRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x1c\n\x14source_language_code\x18\x02 \x01(\t\x12\x1d\n\x15target_language_codes\x18\x03 \x03(\t\x12W\n\x06models\x18\x04 \x03(\x0b\x32G.google.cloud.translation.v3beta1.BatchTranslateTextRequest.ModelsEntry\x12\x44\n\rinput_configs\x18\x05 \x03(\x0b\x32-.google.cloud.translation.v3beta1.InputConfig\x12\x45\n\routput_config\x18\x06 \x01(\x0b\x32..google.cloud.translation.v3beta1.OutputConfig\x12_\n\nglossaries\x18\x07 \x03(\x0b\x32K.google.cloud.translation.v3beta1.BatchTranslateTextRequest.GlossariesEntry\x1a-\n\x0bModelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1ap\n\x0fGlossariesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12L\n\x05value\x18\x02 \x01(\x0b\x32=.google.cloud.translation.v3beta1.TranslateTextGlossaryConfig:\x02\x38\x01"\xd3\x02\n\x16\x42\x61tchTranslateMetadata\x12M\n\x05state\x18\x01 \x01(\x0e\x32>.google.cloud.translation.v3beta1.BatchTranslateMetadata.State\x12\x1d\n\x15translated_characters\x18\x02 \x01(\x03\x12\x19\n\x11\x66\x61iled_characters\x18\x03 \x01(\x03\x12\x18\n\x10total_characters\x18\x04 \x01(\x03\x12/\n\x0bsubmit_time\x18\x05 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp"e\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07RUNNING\x10\x01\x12\r\n\tSUCCEEDED\x10\x02\x12\n\n\x06\x46\x41ILED\x10\x03\x12\x0e\n\nCANCELLING\x10\x04\x12\r\n\tCANCELLED\x10\x05"\xcb\x01\n\x16\x42\x61tchTranslateResponse\x12\x18\n\x10total_characters\x18\x01 \x01(\x03\x12\x1d\n\x15translated_characters\x18\x02 \x01(\x03\x12\x19\n\x11\x66\x61iled_characters\x18\x03 \x01(\x03\x12/\n\x0bsubmit_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"b\n\x13GlossaryInputConfig\x12\x41\n\ngcs_source\x18\x01 \x01(\x0b\x32+.google.cloud.translation.v3beta1.GcsSourceH\x00\x42\x08\n\x06source"\x93\x04\n\x08Glossary\x12\x0c\n\x04name\x18\x01 \x01(\t\x12T\n\rlanguage_pair\x18\x03 \x01(\x0b\x32;.google.cloud.translation.v3beta1.Glossary.LanguageCodePairH\x00\x12Y\n\x12language_codes_set\x18\x04 \x01(\x0b\x32;.google.cloud.translation.v3beta1.Glossary.LanguageCodesSetH\x00\x12K\n\x0cinput_config\x18\x05 \x01(\x0b\x32\x35.google.cloud.translation.v3beta1.GlossaryInputConfig\x12\x13\n\x0b\x65ntry_count\x18\x06 \x01(\x05\x12/\n\x0bsubmit_time\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x1aN\n\x10LanguageCodePair\x12\x1c\n\x14source_language_code\x18\x01 \x01(\t\x12\x1c\n\x14target_language_code\x18\x02 \x01(\t\x1a*\n\x10LanguageCodesSet\x12\x16\n\x0elanguage_codes\x18\x01 \x03(\tB\x0b\n\tlanguages"e\n\x15\x43reateGlossaryRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12<\n\x08glossary\x18\x02 \x01(\x0b\x32*.google.cloud.translation.v3beta1.Glossary""\n\x12GetGlossaryRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"%\n\x15\x44\x65leteGlossaryRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"^\n\x15ListGlossariesRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x04 
\x01(\t"q\n\x16ListGlossariesResponse\x12>\n\nglossaries\x18\x01 \x03(\x0b\x32*.google.cloud.translation.v3beta1.Glossary\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\x8d\x02\n\x16\x43reateGlossaryMetadata\x12\x0c\n\x04name\x18\x01 \x01(\t\x12M\n\x05state\x18\x02 \x01(\x0e\x32>.google.cloud.translation.v3beta1.CreateGlossaryMetadata.State\x12/\n\x0bsubmit_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"e\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07RUNNING\x10\x01\x12\r\n\tSUCCEEDED\x10\x02\x12\n\n\x06\x46\x41ILED\x10\x03\x12\x0e\n\nCANCELLING\x10\x04\x12\r\n\tCANCELLED\x10\x05"\x8d\x02\n\x16\x44\x65leteGlossaryMetadata\x12\x0c\n\x04name\x18\x01 \x01(\t\x12M\n\x05state\x18\x02 \x01(\x0e\x32>.google.cloud.translation.v3beta1.DeleteGlossaryMetadata.State\x12/\n\x0bsubmit_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"e\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07RUNNING\x10\x01\x12\r\n\tSUCCEEDED\x10\x02\x12\n\n\x06\x46\x41ILED\x10\x03\x12\x0e\n\nCANCELLING\x10\x04\x12\r\n\tCANCELLED\x10\x05"\x85\x01\n\x16\x44\x65leteGlossaryResponse\x12\x0c\n\x04name\x18\x01 \x01(\t\x12/\n\x0bsubmit_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x03 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp2\xb5\r\n\x12TranslationService\x12\xf4\x01\n\rTranslateText\x12\x36.google.cloud.translation.v3beta1.TranslateTextRequest\x1a\x37.google.cloud.translation.v3beta1.TranslateTextResponse"r\x82\xd3\xe4\x93\x02l"6/v3beta1/{parent=projects/*/locations/*}:translateText:\x01*Z/"*/v3beta1/{parent=projects/*}:translateText:\x01*\x12\xf9\x01\n\x0e\x44\x65tectLanguage\x12\x37.google.cloud.translation.v3beta1.DetectLanguageRequest\x1a\x38.google.cloud.translation.v3beta1.DetectLanguageResponse"t\x82\xd3\xe4\x93\x02n"7/v3beta1/{parent=projects/*/locations/*}:detectLanguage:\x01*Z0"+/v3beta1/{parent=projects/*}:detectLanguage:\x01*\x12\x85\x02\n\x15GetSupportedLanguages\x12>.google.cloud.translation.v3beta1.GetSupportedLanguagesRequest\x1a\x34.google.cloud.translation.v3beta1.SupportedLanguages"v\x82\xd3\xe4\x93\x02p\x12;/v3beta1/{parent=projects/*/locations/*}/supportedLanguagesZ1\x12//v3beta1/{parent=projects/*}/supportedLanguages\x12\xb8\x01\n\x12\x42\x61tchTranslateText\x12;.google.cloud.translation.v3beta1.BatchTranslateTextRequest\x1a\x1d.google.longrunning.Operation"F\x82\xd3\xe4\x93\x02@";/v3beta1/{parent=projects/*/locations/*}:batchTranslateText:\x01*\x12\xaf\x01\n\x0e\x43reateGlossary\x12\x37.google.cloud.translation.v3beta1.CreateGlossaryRequest\x1a\x1d.google.longrunning.Operation"E\x82\xd3\xe4\x93\x02?"3/v3beta1/{parent=projects/*/locations/*}/glossaries:\x08glossary\x12\xc0\x01\n\x0eListGlossaries\x12\x37.google.cloud.translation.v3beta1.ListGlossariesRequest\x1a\x38.google.cloud.translation.v3beta1.ListGlossariesResponse";\x82\xd3\xe4\x93\x02\x35\x12\x33/v3beta1/{parent=projects/*/locations/*}/glossaries\x12\xac\x01\n\x0bGetGlossary\x12\x34.google.cloud.translation.v3beta1.GetGlossaryRequest\x1a*.google.cloud.translation.v3beta1.Glossary";\x82\xd3\xe4\x93\x02\x35\x12\x33/v3beta1/{name=projects/*/locations/*/glossaries/*}\x12\xa5\x01\n\x0e\x44\x65leteGlossary\x12\x37.google.cloud.translation.v3beta1.DeleteGlossaryRequest\
x1a\x1d.google.longrunning.Operation";\x82\xd3\xe4\x93\x02\x35*3/v3beta1/{name=projects/*/locations/*/glossaries/*}\x1a\x1d\xca\x41\x1atranslation.googleapis.comB\xf1\x01\n"com.google.cloud.translate.v3beta1B\x17TranslationServiceProtoP\x01ZGgoogle.golang.org/genproto/googleapis/cloud/translate/v3beta1;translate\xf8\x01\x01\xaa\x02\x1eGoogle.Cloud.Translate.V3Beta1\xca\x02\x1eGoogle\\Cloud\\Translate\\V3beta1\xea\x02!Google::Cloud::Translate::V3beta1b\x06proto3' + '\n@google/cloud/translation_v3beta1/proto/translation_service.proto\x12 google.cloud.translation.v3beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a#google/longrunning/operations.proto\x1a\x1fgoogle/protobuf/timestamp.proto"N\n\x1bTranslateTextGlossaryConfig\x12\x15\n\x08glossary\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x18\n\x0bignore_case\x18\x02 \x01(\x08\x42\x03\xe0\x41\x01"\xbf\x03\n\x14TranslateTextRequest\x12\x15\n\x08\x63ontents\x18\x01 \x03(\tB\x03\xe0\x41\x02\x12\x16\n\tmime_type\x18\x03 \x01(\tB\x03\xe0\x41\x01\x12!\n\x14source_language_code\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12!\n\x14target_language_code\x18\x05 \x01(\tB\x03\xe0\x41\x02\x12\x39\n\x06parent\x18\x08 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!locations.googleapis.com/Location\x12\x12\n\x05model\x18\x06 \x01(\tB\x03\xe0\x41\x01\x12[\n\x0fglossary_config\x18\x07 \x01(\x0b\x32=.google.cloud.translation.v3beta1.TranslateTextGlossaryConfigB\x03\xe0\x41\x01\x12W\n\x06labels\x18\n \x03(\x0b\x32\x42.google.cloud.translation.v3beta1.TranslateTextRequest.LabelsEntryB\x03\xe0\x41\x01\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xaa\x01\n\x15TranslateTextResponse\x12\x43\n\x0ctranslations\x18\x01 \x03(\x0b\x32-.google.cloud.translation.v3beta1.Translation\x12L\n\x15glossary_translations\x18\x03 
\x03(\x0b\x32-.google.cloud.translation.v3beta1.Translation"\xad\x01\n\x0bTranslation\x12\x17\n\x0ftranslated_text\x18\x01 \x01(\t\x12\r\n\x05model\x18\x02 \x01(\t\x12\x1e\n\x16\x64\x65tected_language_code\x18\x04 \x01(\t\x12V\n\x0fglossary_config\x18\x03 \x01(\x0b\x32=.google.cloud.translation.v3beta1.TranslateTextGlossaryConfig"\x9f\x02\n\x15\x44\x65tectLanguageRequest\x12\x39\n\x06parent\x18\x05 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!locations.googleapis.com/Location\x12\x12\n\x05model\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12\x11\n\x07\x63ontent\x18\x01 \x01(\tH\x00\x12\x16\n\tmime_type\x18\x03 \x01(\tB\x03\xe0\x41\x01\x12S\n\x06labels\x18\x06 \x03(\x0b\x32\x43.google.cloud.translation.v3beta1.DetectLanguageRequest.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x08\n\x06source"=\n\x10\x44\x65tectedLanguage\x12\x15\n\rlanguage_code\x18\x01 \x01(\t\x12\x12\n\nconfidence\x18\x02 \x01(\x02"_\n\x16\x44\x65tectLanguageResponse\x12\x45\n\tlanguages\x18\x01 \x03(\x0b\x32\x32.google.cloud.translation.v3beta1.DetectedLanguage"\x91\x01\n\x1cGetSupportedLanguagesRequest\x12\x39\n\x06parent\x18\x03 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!locations.googleapis.com/Location\x12"\n\x15\x64isplay_language_code\x18\x01 \x01(\tB\x03\xe0\x41\x01\x12\x12\n\x05model\x18\x02 \x01(\tB\x03\xe0\x41\x01"\\\n\x12SupportedLanguages\x12\x46\n\tlanguages\x18\x01 \x03(\x0b\x32\x33.google.cloud.translation.v3beta1.SupportedLanguage"p\n\x11SupportedLanguage\x12\x15\n\rlanguage_code\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12\x16\n\x0esupport_source\x18\x03 \x01(\x08\x12\x16\n\x0esupport_target\x18\x04 \x01(\x08"#\n\tGcsSource\x12\x16\n\tinput_uri\x18\x01 \x01(\tB\x03\xe0\x41\x02"r\n\x0bInputConfig\x12\x16\n\tmime_type\x18\x01 \x01(\tB\x03\xe0\x41\x01\x12\x41\n\ngcs_source\x18\x02 \x01(\x0b\x32+.google.cloud.translation.v3beta1.GcsSourceH\x00\x42\x08\n\x06source"0\n\x0eGcsDestination\x12\x1e\n\x11output_uri_prefix\x18\x01 
\x01(\tB\x03\xe0\x41\x02"j\n\x0cOutputConfig\x12K\n\x0fgcs_destination\x18\x01 \x01(\x0b\x32\x30.google.cloud.translation.v3beta1.GcsDestinationH\x00\x42\r\n\x0b\x64\x65stination"\xa6\x06\n\x19\x42\x61tchTranslateTextRequest\x12\x39\n\x06parent\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!locations.googleapis.com/Location\x12!\n\x14source_language_code\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12"\n\x15target_language_codes\x18\x03 \x03(\tB\x03\xe0\x41\x02\x12\\\n\x06models\x18\x04 \x03(\x0b\x32G.google.cloud.translation.v3beta1.BatchTranslateTextRequest.ModelsEntryB\x03\xe0\x41\x01\x12I\n\rinput_configs\x18\x05 \x03(\x0b\x32-.google.cloud.translation.v3beta1.InputConfigB\x03\xe0\x41\x02\x12J\n\routput_config\x18\x06 \x01(\x0b\x32..google.cloud.translation.v3beta1.OutputConfigB\x03\xe0\x41\x02\x12\x64\n\nglossaries\x18\x07 \x03(\x0b\x32K.google.cloud.translation.v3beta1.BatchTranslateTextRequest.GlossariesEntryB\x03\xe0\x41\x01\x12\\\n\x06labels\x18\t \x03(\x0b\x32G.google.cloud.translation.v3beta1.BatchTranslateTextRequest.LabelsEntryB\x03\xe0\x41\x01\x1a-\n\x0bModelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1ap\n\x0fGlossariesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12L\n\x05value\x18\x02 \x01(\x0b\x32=.google.cloud.translation.v3beta1.TranslateTextGlossaryConfig:\x02\x38\x01\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xd3\x02\n\x16\x42\x61tchTranslateMetadata\x12M\n\x05state\x18\x01 \x01(\x0e\x32>.google.cloud.translation.v3beta1.BatchTranslateMetadata.State\x12\x1d\n\x15translated_characters\x18\x02 \x01(\x03\x12\x19\n\x11\x66\x61iled_characters\x18\x03 \x01(\x03\x12\x18\n\x10total_characters\x18\x04 \x01(\x03\x12/\n\x0bsubmit_time\x18\x05 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp"e\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07RUNNING\x10\x01\x12\r\n\tSUCCEEDED\x10\x02\x12\n\n\x06\x46\x41ILED\x10\x03\x12\x0e\n\nCANCELLING\x10\x04\x12\r\n\tCANCELLED\x10\x05"\xcb\x01\n\x16\x42\x61tchTranslateResponse\x12\x18\n\x10total_characters\x18\x01 \x01(\x03\x12\x1d\n\x15translated_characters\x18\x02 \x01(\x03\x12\x19\n\x11\x66\x61iled_characters\x18\x03 \x01(\x03\x12/\n\x0bsubmit_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"b\n\x13GlossaryInputConfig\x12\x41\n\ngcs_source\x18\x01 \x01(\x0b\x32+.google.cloud.translation.v3beta1.GcsSourceH\x00\x42\x08\n\x06source"\x8e\x05\n\x08Glossary\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12T\n\rlanguage_pair\x18\x03 \x01(\x0b\x32;.google.cloud.translation.v3beta1.Glossary.LanguageCodePairH\x00\x12Y\n\x12language_codes_set\x18\x04 \x01(\x0b\x32;.google.cloud.translation.v3beta1.Glossary.LanguageCodesSetH\x00\x12K\n\x0cinput_config\x18\x05 \x01(\x0b\x32\x35.google.cloud.translation.v3beta1.GlossaryInputConfig\x12\x18\n\x0b\x65ntry_count\x18\x06 \x01(\x05\x42\x03\xe0\x41\x03\x12\x34\n\x0bsubmit_time\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x31\n\x08\x65nd_time\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x1aN\n\x10LanguageCodePair\x12\x1c\n\x14source_language_code\x18\x01 \x01(\t\x12\x1c\n\x14target_language_code\x18\x02 \x01(\t\x1a*\n\x10LanguageCodesSet\x12\x16\n\x0elanguage_codes\x18\x01 \x03(\t:e\xea\x41\x62\n!translate.googleapis.com/Glossary\x12=projects/{project}/locations/{location}/glossaries/{glossary}B\x0b\n\tlanguages"\x95\x01\n\x15\x43reateGlossaryRequest\x12\x39\n\x06parent\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!locations.googleapis.com/Location\x12\x41\n\x08glossary\x18\x02 
\x01(\x0b\x32*.google.cloud.translation.v3beta1.GlossaryB\x03\xe0\x41\x02"M\n\x12GetGlossaryRequest\x12\x37\n\x04name\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!translate.googleapis.com/Glossary"P\n\x15\x44\x65leteGlossaryRequest\x12\x37\n\x04name\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!translate.googleapis.com/Glossary"\x98\x01\n\x15ListGlossariesRequest\x12\x39\n\x06parent\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!locations.googleapis.com/Location\x12\x16\n\tpage_size\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01\x12\x17\n\npage_token\x18\x03 \x01(\tB\x03\xe0\x41\x01\x12\x13\n\x06\x66ilter\x18\x04 \x01(\tB\x03\xe0\x41\x01"q\n\x16ListGlossariesResponse\x12>\n\nglossaries\x18\x01 \x03(\x0b\x32*.google.cloud.translation.v3beta1.Glossary\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\x8d\x02\n\x16\x43reateGlossaryMetadata\x12\x0c\n\x04name\x18\x01 \x01(\t\x12M\n\x05state\x18\x02 \x01(\x0e\x32>.google.cloud.translation.v3beta1.CreateGlossaryMetadata.State\x12/\n\x0bsubmit_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"e\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07RUNNING\x10\x01\x12\r\n\tSUCCEEDED\x10\x02\x12\n\n\x06\x46\x41ILED\x10\x03\x12\x0e\n\nCANCELLING\x10\x04\x12\r\n\tCANCELLED\x10\x05"\x8d\x02\n\x16\x44\x65leteGlossaryMetadata\x12\x0c\n\x04name\x18\x01 \x01(\t\x12M\n\x05state\x18\x02 \x01(\x0e\x32>.google.cloud.translation.v3beta1.DeleteGlossaryMetadata.State\x12/\n\x0bsubmit_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"e\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07RUNNING\x10\x01\x12\r\n\tSUCCEEDED\x10\x02\x12\n\n\x06\x46\x41ILED\x10\x03\x12\x0e\n\nCANCELLING\x10\x04\x12\r\n\tCANCELLED\x10\x05"\x85\x01\n\x16\x44\x65leteGlossaryResponse\x12\x0c\n\x04name\x18\x01 \x01(\t\x12/\n\x0bsubmit_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x03 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp2\x9a\x10\n\x12TranslationService\x12\xf4\x01\n\rTranslateText\x12\x36.google.cloud.translation.v3beta1.TranslateTextRequest\x1a\x37.google.cloud.translation.v3beta1.TranslateTextResponse"r\x82\xd3\xe4\x93\x02l"6/v3beta1/{parent=projects/*/locations/*}:translateText:\x01*Z/"*/v3beta1/{parent=projects/*}:translateText:\x01*\x12\x93\x02\n\x0e\x44\x65tectLanguage\x12\x37.google.cloud.translation.v3beta1.DetectLanguageRequest\x1a\x38.google.cloud.translation.v3beta1.DetectLanguageResponse"\x8d\x01\x82\xd3\xe4\x93\x02n"7/v3beta1/{parent=projects/*/locations/*}:detectLanguage:\x01*Z0"+/v3beta1/{parent=projects/*}:detectLanguage:\x01*\xda\x41\x16parent,model,mime_type\x12\xab\x02\n\x15GetSupportedLanguages\x12>.google.cloud.translation.v3beta1.GetSupportedLanguagesRequest\x1a\x34.google.cloud.translation.v3beta1.SupportedLanguages"\x9b\x01\x82\xd3\xe4\x93\x02p\x12;/v3beta1/{parent=projects/*/locations/*}/supportedLanguagesZ1\x12//v3beta1/{parent=projects/*}/supportedLanguages\xda\x41"parent,display_language_code,model\x12\xeb\x01\n\x12\x42\x61tchTranslateText\x12;.google.cloud.translation.v3beta1.BatchTranslateTextRequest\x1a\x1d.google.longrunning.Operation"y\x82\xd3\xe4\x93\x02@";/v3beta1/{parent=projects/*/locations/*}:batchTranslateText:\x01*\xca\x41\x30\n\x16\x42\x61tchTranslateResponse\x12\x16\x42\x61tchTranslateMetadata\x12\xe6\x01\n\x0e\x43reateGlossary\x12\x37.google.cloud.translation.v3beta1.CreateGlossaryRequest\x1a\x1d.google.longrunning.Operation"|\x82\xd3\xe4\x93\x02?"3/v3beta1/{parent=projects/*/locations/*}/glossaries:\x08glossary\xda\x41\x0fparent,glossary\xca\x41"\n\x08Glossary\x12\x16\x43reateGlossaryMetadata\x12\xd9\x01\n\x0eListGlossaries\x12\x37.google.cloud.translation.v3beta1.ListGlossariesRequest\x1a\x38.google.cloud.translation.v3beta1.ListGlossariesResponse"T\x82\xd3\xe4\x93\x02\x35\x12\x33/v3beta1/{parent=projects/*/locations/*}/glossaries\xda\x41\x06parent\xda\x41\rparent,filter\x12\xb3\x01\n\x0bGetGlo
ssary\x12\x34.google.cloud.translation.v3beta1.GetGlossaryRequest\x1a*.google.cloud.translation.v3beta1.Glossary"B\x82\xd3\xe4\x93\x02\x35\x12\x33/v3beta1/{name=projects/*/locations/*/glossaries/*}\xda\x41\x04name\x12\xdf\x01\n\x0e\x44\x65leteGlossary\x12\x37.google.cloud.translation.v3beta1.DeleteGlossaryRequest\x1a\x1d.google.longrunning.Operation"u\x82\xd3\xe4\x93\x02\x35*3/v3beta1/{name=projects/*/locations/*/glossaries/*}\xda\x41\x04name\xca\x41\x30\n\x16\x44\x65leteGlossaryResponse\x12\x16\x44\x65leteGlossaryMetadata\x1a~\xca\x41\x18translate.googleapis.com\xd2\x41`https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-translationB\xf1\x01\n"com.google.cloud.translate.v3beta1B\x17TranslationServiceProtoP\x01ZGgoogle.golang.org/genproto/googleapis/cloud/translate/v3beta1;translate\xf8\x01\x01\xaa\x02\x1eGoogle.Cloud.Translate.V3Beta1\xca\x02\x1eGoogle\\Cloud\\Translate\\V3beta1\xea\x02!Google::Cloud::Translate::V3beta1b\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, google_dot_api_dot_resource__pb2.DESCRIPTOR, google_dot_longrunning_dot_operations__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - google_dot_api_dot_client__pb2.DESCRIPTOR, ], ) @@ -75,8 +77,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=2610, - serialized_end=2711, + serialized_start=3331, + serialized_end=3432, ) _sym_db.RegisterEnumDescriptor(_BATCHTRANSLATEMETADATA_STATE) @@ -111,8 +113,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=2610, - serialized_end=2711, + serialized_start=3331, + serialized_end=3432, ) _sym_db.RegisterEnumDescriptor(_CREATEGLOSSARYMETADATA_STATE) @@ -147,8 +149,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=2610, - serialized_end=2711, + serialized_start=3331, + serialized_end=3432, ) 
_sym_db.RegisterEnumDescriptor(_DELETEGLOSSARYMETADATA_STATE) @@ -175,7 +177,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -193,7 +195,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -205,11 +207,67 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=254, - serialized_end=322, + serialized_start=287, + serialized_end=365, ) +_TRANSLATETEXTREQUEST_LABELSENTRY = _descriptor.Descriptor( + name="LabelsEntry", + full_name="google.cloud.translation.v3beta1.TranslateTextRequest.LabelsEntry", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="google.cloud.translation.v3beta1.TranslateTextRequest.LabelsEntry.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.cloud.translation.v3beta1.TranslateTextRequest.LabelsEntry.value", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=_b("8\001"), + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=770, + serialized_end=815, +) + _TRANSLATETEXTREQUEST = _descriptor.Descriptor( name="TranslateTextRequest", 
full_name="google.cloud.translation.v3beta1.TranslateTextRequest", @@ -232,7 +290,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -250,7 +308,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -268,7 +326,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -286,7 +344,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -304,7 +362,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A#\n!locations.googleapis.com/Location" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -322,7 +382,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -340,20 +400,38 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="labels", + full_name="google.cloud.translation.v3beta1.TranslateTextRequest.labels", + index=7, + number=10, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], extensions=[], - nested_types=[], + nested_types=[_TRANSLATETEXTREQUEST_LABELSENTRY], enum_types=[], 
serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=325, - serialized_end=563, + serialized_start=368, + serialized_end=815, ) @@ -409,8 +487,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=566, - serialized_end=736, + serialized_start=818, + serialized_end=988, ) @@ -502,11 +580,67 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=739, - serialized_end=912, + serialized_start=991, + serialized_end=1164, ) +_DETECTLANGUAGEREQUEST_LABELSENTRY = _descriptor.Descriptor( + name="LabelsEntry", + full_name="google.cloud.translation.v3beta1.DetectLanguageRequest.LabelsEntry", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="google.cloud.translation.v3beta1.DetectLanguageRequest.LabelsEntry.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.cloud.translation.v3beta1.DetectLanguageRequest.LabelsEntry.value", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=_b("8\001"), + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=770, + serialized_end=815, +) + _DETECTLANGUAGEREQUEST = _descriptor.Descriptor( name="DetectLanguageRequest", full_name="google.cloud.translation.v3beta1.DetectLanguageRequest", @@ -529,7 +663,9 @@ 
containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A#\n!locations.googleapis.com/Location" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -547,7 +683,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -583,12 +719,30 @@ containing_type=None, is_extension=False, extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="labels", + full_name="google.cloud.translation.v3beta1.DetectLanguageRequest.labels", + index=4, + number=6, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], - nested_types=[], + nested_types=[_DETECTLANGUAGEREQUEST_LABELSENTRY], enum_types=[], serialized_options=None, is_extendable=False, @@ -603,8 +757,8 @@ fields=[], ) ], - serialized_start=914, - serialized_end=1016, + serialized_start=1167, + serialized_end=1454, ) @@ -660,8 +814,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1018, - serialized_end=1079, + serialized_start=1456, + serialized_end=1517, ) @@ -699,8 +853,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1081, - serialized_end=1176, + serialized_start=1519, + serialized_end=1614, ) @@ -726,7 +880,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A#\n!locations.googleapis.com/Location" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -744,7 +900,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), 
_descriptor.FieldDescriptor( @@ -762,7 +918,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -774,8 +930,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1178, - serialized_end=1270, + serialized_start=1617, + serialized_end=1762, ) @@ -813,8 +969,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1272, - serialized_end=1364, + serialized_start=1764, + serialized_end=1856, ) @@ -906,8 +1062,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1366, - serialized_end=1478, + serialized_start=1858, + serialized_end=1970, ) @@ -933,7 +1089,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ) ], @@ -945,8 +1101,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1480, - serialized_end=1510, + serialized_start=1972, + serialized_end=2007, ) @@ -972,7 +1128,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1010,8 +1166,8 @@ fields=[], ) ], - serialized_start=1512, - serialized_end=1621, + serialized_start=2009, + serialized_end=2123, ) @@ -1037,7 +1193,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ) ], @@ -1049,8 +1205,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1623, - serialized_end=1666, + serialized_start=2125, + serialized_end=2173, ) @@ -1096,8 +1252,8 @@ fields=[], ) ], - serialized_start=1668, - serialized_end=1774, + serialized_start=2175, + serialized_end=2281, ) @@ -1153,8 +1309,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2210, - serialized_end=2255, + serialized_start=2884, 
+ serialized_end=2929, ) _BATCHTRANSLATETEXTREQUEST_GLOSSARIESENTRY = _descriptor.Descriptor( @@ -1209,8 +1365,64 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2257, - serialized_end=2369, + serialized_start=2931, + serialized_end=3043, +) + +_BATCHTRANSLATETEXTREQUEST_LABELSENTRY = _descriptor.Descriptor( + name="LabelsEntry", + full_name="google.cloud.translation.v3beta1.BatchTranslateTextRequest.LabelsEntry", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="google.cloud.translation.v3beta1.BatchTranslateTextRequest.LabelsEntry.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.cloud.translation.v3beta1.BatchTranslateTextRequest.LabelsEntry.value", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=_b("8\001"), + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=770, + serialized_end=815, ) _BATCHTRANSLATETEXTREQUEST = _descriptor.Descriptor( @@ -1235,7 +1447,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A#\n!locations.googleapis.com/Location" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1253,7 +1467,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + 
serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1271,7 +1485,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1289,7 +1503,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1307,7 +1521,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1325,7 +1539,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1343,7 +1557,25 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="labels", + full_name="google.cloud.translation.v3beta1.BatchTranslateTextRequest.labels", + index=7, + number=9, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -1351,6 +1583,7 @@ nested_types=[ _BATCHTRANSLATETEXTREQUEST_MODELSENTRY, _BATCHTRANSLATETEXTREQUEST_GLOSSARIESENTRY, + _BATCHTRANSLATETEXTREQUEST_LABELSENTRY, ], enum_types=[], serialized_options=None, @@ -1358,8 +1591,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1777, - serialized_end=2369, + serialized_start=2284, + serialized_end=3090, ) @@ -1469,8 +1702,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2372, - serialized_end=2711, + serialized_start=3093, + 
serialized_end=3432, ) @@ -1580,8 +1813,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2714, - serialized_end=2917, + serialized_start=3435, + serialized_end=3638, ) @@ -1627,8 +1860,8 @@ fields=[], ) ], - serialized_start=2919, - serialized_end=3017, + serialized_start=3640, + serialized_end=3738, ) @@ -1684,8 +1917,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3416, - serialized_end=3494, + serialized_start=4157, + serialized_end=4235, ) _GLOSSARY_LANGUAGECODESSET = _descriptor.Descriptor( @@ -1722,8 +1955,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3496, - serialized_end=3538, + serialized_start=4237, + serialized_end=4279, ) _GLOSSARY = _descriptor.Descriptor( @@ -1748,7 +1981,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1820,7 +2053,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1838,7 +2071,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1856,14 +2089,16 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], extensions=[], nested_types=[_GLOSSARY_LANGUAGECODEPAIR, _GLOSSARY_LANGUAGECODESSET], enum_types=[], - serialized_options=None, + serialized_options=_b( + "\352Ab\n!translate.googleapis.com/Glossary\022=projects/{project}/locations/{location}/glossaries/{glossary}" + ), is_extendable=False, syntax="proto3", extension_ranges=[], @@ -1876,8 +2111,8 @@ fields=[], ) ], - serialized_start=3020, - serialized_end=3551, + serialized_start=3741, + serialized_end=4395, ) @@ 
-1903,7 +2138,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A#\n!locations.googleapis.com/Location" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1921,7 +2158,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -1933,8 +2170,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3553, - serialized_end=3654, + serialized_start=4398, + serialized_end=4547, ) @@ -1960,7 +2197,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A#\n!translate.googleapis.com/Glossary" + ), file=DESCRIPTOR, ) ], @@ -1972,8 +2211,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3656, - serialized_end=3690, + serialized_start=4549, + serialized_end=4626, ) @@ -1999,7 +2238,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A#\n!translate.googleapis.com/Glossary" + ), file=DESCRIPTOR, ) ], @@ -2011,8 +2252,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3692, - serialized_end=3729, + serialized_start=4628, + serialized_end=4708, ) @@ -2038,7 +2279,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A#\n!locations.googleapis.com/Location" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2056,7 +2299,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2074,7 +2317,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), 
_descriptor.FieldDescriptor( @@ -2092,7 +2335,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -2104,8 +2347,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3731, - serialized_end=3825, + serialized_start=4711, + serialized_end=4863, ) @@ -2161,8 +2404,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3827, - serialized_end=3940, + serialized_start=4865, + serialized_end=4978, ) @@ -2236,8 +2479,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3943, - serialized_end=4212, + serialized_start=4981, + serialized_end=5250, ) @@ -2311,8 +2554,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4215, - serialized_end=4484, + serialized_start=5253, + serialized_end=5522, ) @@ -2386,13 +2629,17 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4487, - serialized_end=4620, + serialized_start=5525, + serialized_end=5658, ) +_TRANSLATETEXTREQUEST_LABELSENTRY.containing_type = _TRANSLATETEXTREQUEST _TRANSLATETEXTREQUEST.fields_by_name[ "glossary_config" ].message_type = _TRANSLATETEXTGLOSSARYCONFIG +_TRANSLATETEXTREQUEST.fields_by_name[ + "labels" +].message_type = _TRANSLATETEXTREQUEST_LABELSENTRY _TRANSLATETEXTRESPONSE.fields_by_name["translations"].message_type = _TRANSLATION _TRANSLATETEXTRESPONSE.fields_by_name[ "glossary_translations" @@ -2400,6 +2647,10 @@ _TRANSLATION.fields_by_name[ "glossary_config" ].message_type = _TRANSLATETEXTGLOSSARYCONFIG +_DETECTLANGUAGEREQUEST_LABELSENTRY.containing_type = _DETECTLANGUAGEREQUEST +_DETECTLANGUAGEREQUEST.fields_by_name[ + "labels" +].message_type = _DETECTLANGUAGEREQUEST_LABELSENTRY _DETECTLANGUAGEREQUEST.oneofs_by_name["source"].fields.append( _DETECTLANGUAGEREQUEST.fields_by_name["content"] ) @@ -2427,6 +2678,7 @@ "value" ].message_type = _TRANSLATETEXTGLOSSARYCONFIG 
_BATCHTRANSLATETEXTREQUEST_GLOSSARIESENTRY.containing_type = _BATCHTRANSLATETEXTREQUEST +_BATCHTRANSLATETEXTREQUEST_LABELSENTRY.containing_type = _BATCHTRANSLATETEXTREQUEST _BATCHTRANSLATETEXTREQUEST.fields_by_name[ "models" ].message_type = _BATCHTRANSLATETEXTREQUEST_MODELSENTRY @@ -2435,6 +2687,9 @@ _BATCHTRANSLATETEXTREQUEST.fields_by_name[ "glossaries" ].message_type = _BATCHTRANSLATETEXTREQUEST_GLOSSARIESENTRY +_BATCHTRANSLATETEXTREQUEST.fields_by_name[ + "labels" +].message_type = _BATCHTRANSLATETEXTREQUEST_LABELSENTRY _BATCHTRANSLATEMETADATA.fields_by_name[ "state" ].enum_type = _BATCHTRANSLATEMETADATA_STATE @@ -2562,6 +2817,15 @@ "TranslateTextRequest", (_message.Message,), dict( + LabelsEntry=_reflection.GeneratedProtocolMessageType( + "LabelsEntry", + (_message.Message,), + dict( + DESCRIPTOR=_TRANSLATETEXTREQUEST_LABELSENTRY, + __module__="google.cloud.translation_v3beta1.proto.translation_service_pb2" + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3beta1.TranslateTextRequest.LabelsEntry) + ), + ), DESCRIPTOR=_TRANSLATETEXTREQUEST, __module__="google.cloud.translation_v3beta1.proto.translation_service_pb2", __doc__="""The request message for synchronous translation. @@ -2588,9 +2852,12 @@ the input text, set to one of the language codes listed in Language Support. parent: - Required. Location to make a regional or global call. Format: + Required. Project or location to make a call. Must refer to a + caller's project. Format: ``projects/{project-id}`` or ``projects/{project-id}/locations/{location-id}``. For global - calls, use ``projects/{project-id}/locations/global``. Models + calls, use ``projects/{project-id}/locations/global`` or + ``projects/{project-id}``. Non-global location is required + for requests using AutoML models or custom glossaries. Models and glossaries must be within the same region (have same location-id), otherwise an INVALID\_ARGUMENT (400) error is returned. @@ -2610,11 +2877,21 @@ Optional. 
Glossary to be applied. The glossary must be within the same region (have the same location-id) as the model, otherwise an INVALID\_ARGUMENT (400) error is returned. + labels: + Optional. The labels with user-defined metadata for the + request. Label keys and values can be no longer than 63 + characters (Unicode codepoints), can only contain lowercase + letters, numeric characters, underscores and dashes. + International characters are allowed. Label values are + optional. Label keys must start with a letter. See + https://cloud.google.com/translate/docs/labels for more + information. """, # @@protoc_insertion_point(class_scope:google.cloud.translation.v3beta1.TranslateTextRequest) ), ) _sym_db.RegisterMessage(TranslateTextRequest) +_sym_db.RegisterMessage(TranslateTextRequest.LabelsEntry) TranslateTextResponse = _reflection.GeneratedProtocolMessageType( "TranslateTextResponse", @@ -2672,6 +2949,15 @@ "DetectLanguageRequest", (_message.Message,), dict( + LabelsEntry=_reflection.GeneratedProtocolMessageType( + "LabelsEntry", + (_message.Message,), + dict( + DESCRIPTOR=_DETECTLANGUAGEREQUEST_LABELSENTRY, + __module__="google.cloud.translation_v3beta1.proto.translation_service_pb2" + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3beta1.DetectLanguageRequest.LabelsEntry) + ), + ), DESCRIPTOR=_DETECTLANGUAGEREQUEST, __module__="google.cloud.translation_v3beta1.proto.translation_service_pb2", __doc__="""The request message for language detection. @@ -2679,9 +2965,11 @@ Attributes: parent: - Required. Location to make a regional or global call. Format: - ``projects/{project-id}/locations/{location-id}``. For global - calls, use ``projects/{project-id}/locations/global``. Only + Required. Project or location to make a call. Must refer to a + caller's project. Format: ``projects/{project- + id}/locations/{location-id}`` or ``projects/{project-id}``. + For global calls, use ``projects/{project- + id}/locations/global`` or ``projects/{project-id}``. 
Only models within the same region (has same location-id) can be used. Otherwise an INVALID\_ARGUMENT (400) error is returned. model: @@ -2701,11 +2989,21 @@ Optional. The format of the source text, for example, "text/html", "text/plain". If left blank, the MIME type defaults to "text/html". + labels: + Optional. The labels with user-defined metadata for the + request. Label keys and values can be no longer than 63 + characters (Unicode codepoints), can only contain lowercase + letters, numeric characters, underscores and dashes. + International characters are allowed. Label values are + optional. Label keys must start with a letter. See + https://cloud.google.com/translate/docs/labels for more + information. """, # @@protoc_insertion_point(class_scope:google.cloud.translation.v3beta1.DetectLanguageRequest) ), ) _sym_db.RegisterMessage(DetectLanguageRequest) +_sym_db.RegisterMessage(DetectLanguageRequest.LabelsEntry) DetectedLanguage = _reflection.GeneratedProtocolMessageType( "DetectedLanguage", @@ -2758,11 +3056,14 @@ Attributes: parent: - Required. Location to make a regional or global call. Format: + Required. Project or location to make a call. Must refer to a + caller's project. Format: ``projects/{project-id}`` or ``projects/{project-id}/locations/{location-id}``. For global - calls, use ``projects/{project-id}/locations/global``. Only - models within the same region (have same location-id) can be - used, otherwise an INVALID\_ARGUMENT (400) error is returned. + calls, use ``projects/{project-id}/locations/global`` or + ``projects/{project-id}``. Non-global location is required + for AutoML models. Only models within the same region (have + same location-id) can be used, otherwise an INVALID\_ARGUMENT + (400) error is returned. display_language_code: Optional. The language to use to return localized, human readable names of supported languages. 
If missing, then @@ -2896,14 +3197,15 @@ dict( DESCRIPTOR=_GCSDESTINATION, __module__="google.cloud.translation_v3beta1.proto.translation_service_pb2", - __doc__="""The Google Cloud Storage location for the output content + __doc__="""The Google Cloud Storage location for the output content. Attributes: output_uri_prefix: Required. There must be no files under 'output\_uri\_prefix'. - 'output\_uri\_prefix' must end with "/", otherwise an - INVALID\_ARGUMENT (400) error is returned.. + 'output\_uri\_prefix' must end with "/" and start with + "gs://", otherwise an INVALID\_ARGUMENT (400) error is + returned. """, # @@protoc_insertion_point(class_scope:google.cloud.translation.v3beta1.GcsDestination) ), @@ -3006,6 +3308,15 @@ # @@protoc_insertion_point(class_scope:google.cloud.translation.v3beta1.BatchTranslateTextRequest.GlossariesEntry) ), ), + LabelsEntry=_reflection.GeneratedProtocolMessageType( + "LabelsEntry", + (_message.Message,), + dict( + DESCRIPTOR=_BATCHTRANSLATETEXTREQUEST_LABELSENTRY, + __module__="google.cloud.translation_v3beta1.proto.translation_service_pb2" + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3beta1.BatchTranslateTextRequest.LabelsEntry) + ), + ), DESCRIPTOR=_BATCHTRANSLATETEXTREQUEST, __module__="google.cloud.translation_v3beta1.proto.translation_service_pb2", __doc__="""The batch translation request. @@ -3013,12 +3324,12 @@ Attributes: parent: - Required. Location to make a regional call. Format: - ``projects/{project-id}/locations/{location-id}``. The - ``global`` location is not supported for batch translation. - Only AutoML Translation models or glossaries within the same - region (have the same location-id) can be used, otherwise an - INVALID\_ARGUMENT (400) error is returned. + Required. Location to make a call. Must refer to a caller's + project. Format: ``projects/{project-id}/locations/{location- + id}``. The ``global`` location is not supported for batch + translation. 
Only AutoML Translation models or glossaries + within the same region (have the same location-id) can be + used, otherwise an INVALID\_ARGUMENT (400) error is returned. source_language_code: Required. Source language code. target_language_codes: @@ -3046,6 +3357,15 @@ glossaries: Optional. Glossaries to be applied for translation. It's keyed by target language code. + labels: + Optional. The labels with user-defined metadata for the + request. Label keys and values can be no longer than 63 + characters (Unicode codepoints), can only contain lowercase + letters, numeric characters, underscores and dashes. + International characters are allowed. Label values are + optional. Label keys must start with a letter. See + https://cloud.google.com/translate/docs/labels for more + information. """, # @@protoc_insertion_point(class_scope:google.cloud.translation.v3beta1.BatchTranslateTextRequest) ), @@ -3053,6 +3373,7 @@ _sym_db.RegisterMessage(BatchTranslateTextRequest) _sym_db.RegisterMessage(BatchTranslateTextRequest.ModelsEntry) _sym_db.RegisterMessage(BatchTranslateTextRequest.GlossariesEntry) +_sym_db.RegisterMessage(BatchTranslateTextRequest.LabelsEntry) BatchTranslateMetadata = _reflection.GeneratedProtocolMessageType( "BatchTranslateMetadata", @@ -3419,17 +3740,62 @@ DESCRIPTOR._options = None +_TRANSLATETEXTGLOSSARYCONFIG.fields_by_name["glossary"]._options = None +_TRANSLATETEXTGLOSSARYCONFIG.fields_by_name["ignore_case"]._options = None +_TRANSLATETEXTREQUEST_LABELSENTRY._options = None +_TRANSLATETEXTREQUEST.fields_by_name["contents"]._options = None +_TRANSLATETEXTREQUEST.fields_by_name["mime_type"]._options = None +_TRANSLATETEXTREQUEST.fields_by_name["source_language_code"]._options = None +_TRANSLATETEXTREQUEST.fields_by_name["target_language_code"]._options = None +_TRANSLATETEXTREQUEST.fields_by_name["parent"]._options = None +_TRANSLATETEXTREQUEST.fields_by_name["model"]._options = None +_TRANSLATETEXTREQUEST.fields_by_name["glossary_config"]._options = 
None +_TRANSLATETEXTREQUEST.fields_by_name["labels"]._options = None +_DETECTLANGUAGEREQUEST_LABELSENTRY._options = None +_DETECTLANGUAGEREQUEST.fields_by_name["parent"]._options = None +_DETECTLANGUAGEREQUEST.fields_by_name["model"]._options = None +_DETECTLANGUAGEREQUEST.fields_by_name["mime_type"]._options = None +_GETSUPPORTEDLANGUAGESREQUEST.fields_by_name["parent"]._options = None +_GETSUPPORTEDLANGUAGESREQUEST.fields_by_name["display_language_code"]._options = None +_GETSUPPORTEDLANGUAGESREQUEST.fields_by_name["model"]._options = None +_GCSSOURCE.fields_by_name["input_uri"]._options = None +_INPUTCONFIG.fields_by_name["mime_type"]._options = None +_GCSDESTINATION.fields_by_name["output_uri_prefix"]._options = None _BATCHTRANSLATETEXTREQUEST_MODELSENTRY._options = None _BATCHTRANSLATETEXTREQUEST_GLOSSARIESENTRY._options = None +_BATCHTRANSLATETEXTREQUEST_LABELSENTRY._options = None +_BATCHTRANSLATETEXTREQUEST.fields_by_name["parent"]._options = None +_BATCHTRANSLATETEXTREQUEST.fields_by_name["source_language_code"]._options = None +_BATCHTRANSLATETEXTREQUEST.fields_by_name["target_language_codes"]._options = None +_BATCHTRANSLATETEXTREQUEST.fields_by_name["models"]._options = None +_BATCHTRANSLATETEXTREQUEST.fields_by_name["input_configs"]._options = None +_BATCHTRANSLATETEXTREQUEST.fields_by_name["output_config"]._options = None +_BATCHTRANSLATETEXTREQUEST.fields_by_name["glossaries"]._options = None +_BATCHTRANSLATETEXTREQUEST.fields_by_name["labels"]._options = None +_GLOSSARY.fields_by_name["name"]._options = None +_GLOSSARY.fields_by_name["entry_count"]._options = None +_GLOSSARY.fields_by_name["submit_time"]._options = None +_GLOSSARY.fields_by_name["end_time"]._options = None +_GLOSSARY._options = None +_CREATEGLOSSARYREQUEST.fields_by_name["parent"]._options = None +_CREATEGLOSSARYREQUEST.fields_by_name["glossary"]._options = None +_GETGLOSSARYREQUEST.fields_by_name["name"]._options = None +_DELETEGLOSSARYREQUEST.fields_by_name["name"]._options = None 
+_LISTGLOSSARIESREQUEST.fields_by_name["parent"]._options = None +_LISTGLOSSARIESREQUEST.fields_by_name["page_size"]._options = None +_LISTGLOSSARIESREQUEST.fields_by_name["page_token"]._options = None +_LISTGLOSSARIESREQUEST.fields_by_name["filter"]._options = None _TRANSLATIONSERVICE = _descriptor.ServiceDescriptor( name="TranslationService", full_name="google.cloud.translation.v3beta1.TranslationService", file=DESCRIPTOR, index=0, - serialized_options=_b("\312A\032translation.googleapis.com"), - serialized_start=4623, - serialized_end=6340, + serialized_options=_b( + "\312A\030translate.googleapis.com\322A`https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-translation" + ), + serialized_start=5661, + serialized_end=7735, methods=[ _descriptor.MethodDescriptor( name="TranslateText", @@ -3450,7 +3816,7 @@ input_type=_DETECTLANGUAGEREQUEST, output_type=_DETECTLANGUAGERESPONSE, serialized_options=_b( - '\202\323\344\223\002n"7/v3beta1/{parent=projects/*/locations/*}:detectLanguage:\001*Z0"+/v3beta1/{parent=projects/*}:detectLanguage:\001*' + '\202\323\344\223\002n"7/v3beta1/{parent=projects/*/locations/*}:detectLanguage:\001*Z0"+/v3beta1/{parent=projects/*}:detectLanguage:\001*\332A\026parent,model,mime_type' ), ), _descriptor.MethodDescriptor( @@ -3461,7 +3827,7 @@ input_type=_GETSUPPORTEDLANGUAGESREQUEST, output_type=_SUPPORTEDLANGUAGES, serialized_options=_b( - "\202\323\344\223\002p\022;/v3beta1/{parent=projects/*/locations/*}/supportedLanguagesZ1\022//v3beta1/{parent=projects/*}/supportedLanguages" + '\202\323\344\223\002p\022;/v3beta1/{parent=projects/*/locations/*}/supportedLanguagesZ1\022//v3beta1/{parent=projects/*}/supportedLanguages\332A"parent,display_language_code,model' ), ), _descriptor.MethodDescriptor( @@ -3472,7 +3838,7 @@ input_type=_BATCHTRANSLATETEXTREQUEST, output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, serialized_options=_b( - 
'\202\323\344\223\002@";/v3beta1/{parent=projects/*/locations/*}:batchTranslateText:\001*' + '\202\323\344\223\002@";/v3beta1/{parent=projects/*/locations/*}:batchTranslateText:\001*\312A0\n\026BatchTranslateResponse\022\026BatchTranslateMetadata' ), ), _descriptor.MethodDescriptor( @@ -3483,7 +3849,7 @@ input_type=_CREATEGLOSSARYREQUEST, output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, serialized_options=_b( - '\202\323\344\223\002?"3/v3beta1/{parent=projects/*/locations/*}/glossaries:\010glossary' + '\202\323\344\223\002?"3/v3beta1/{parent=projects/*/locations/*}/glossaries:\010glossary\332A\017parent,glossary\312A"\n\010Glossary\022\026CreateGlossaryMetadata' ), ), _descriptor.MethodDescriptor( @@ -3494,7 +3860,7 @@ input_type=_LISTGLOSSARIESREQUEST, output_type=_LISTGLOSSARIESRESPONSE, serialized_options=_b( - "\202\323\344\223\0025\0223/v3beta1/{parent=projects/*/locations/*}/glossaries" + "\202\323\344\223\0025\0223/v3beta1/{parent=projects/*/locations/*}/glossaries\332A\006parent\332A\rparent,filter" ), ), _descriptor.MethodDescriptor( @@ -3505,7 +3871,7 @@ input_type=_GETGLOSSARYREQUEST, output_type=_GLOSSARY, serialized_options=_b( - "\202\323\344\223\0025\0223/v3beta1/{name=projects/*/locations/*/glossaries/*}" + "\202\323\344\223\0025\0223/v3beta1/{name=projects/*/locations/*/glossaries/*}\332A\004name" ), ), _descriptor.MethodDescriptor( @@ -3516,7 +3882,7 @@ input_type=_DELETEGLOSSARYREQUEST, output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, serialized_options=_b( - "\202\323\344\223\0025*3/v3beta1/{name=projects/*/locations/*/glossaries/*}" + "\202\323\344\223\0025*3/v3beta1/{name=projects/*/locations/*/glossaries/*}\332A\004name\312A0\n\026DeleteGlossaryResponse\022\026DeleteGlossaryMetadata" ), ), ], diff --git a/translate/setup.py b/translate/setup.py index 5bacdb534155..24d632a90d00 100644 --- a/translate/setup.py +++ b/translate/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-translate" description = "Google 
Cloud Translation API client library" -version = "1.6.0" +version = "2.0.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' diff --git a/translate/synth.metadata b/translate/synth.metadata index cd7d315b9cbf..6c4de912fede 100644 --- a/translate/synth.metadata +++ b/translate/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-08-06T12:45:19.878568Z", + "updateTime": "2019-10-18T22:49:41.466785Z", "sources": [ { "generator": { "name": "artman", - "version": "0.32.1", - "dockerImage": "googleapis/artman@sha256:a684d40ba9a4e15946f5f2ca6b4bd9fe301192f522e9de4fff622118775f309b" + "version": "0.40.0", + "dockerImage": "googleapis/artman@sha256:fd2b49cce3d652929cc80157ec2d91bebe993f7cd4e89afaad80f9c785f8bf36" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "e699b0cba64ffddfae39633417180f1f65875896", - "internalRef": "261759677" + "sha": "0e9a6d15fcb944ed40921ba0aad2082ee1bc7edd", + "internalRef": "275543900" } }, { @@ -34,6 +34,16 @@ "generator": "gapic", "config": "google/cloud/translate/artman_translate_v3beta1.yaml" } + }, + { + "client": { + "source": "googleapis", + "apiName": "translate", + "apiVersion": "v3", + "language": "python", + "generator": "gapic", + "config": "google/cloud/translate/artman_translate_v3.yaml" + } } ] } \ No newline at end of file diff --git a/translate/synth.py b/translate/synth.py index dd81273af78b..14a2ad464c37 100644 --- a/translate/synth.py +++ b/translate/synth.py @@ -19,7 +19,7 @@ gapic = gcp.GAPICGenerator() common = gcp.CommonTemplates() -versions = ["v3beta1"] +versions = ["v3beta1", "v3"] excludes = [ "setup.py", @@ -41,12 +41,15 @@ s.move(library / "tests") s.move(library / f"docs/gapic/{version}") -# translation -> translate -s.replace( - "google/**/translation_service_pb2_grpc.py", - "google.cloud.translation_v3beta1.proto", - "google.cloud.translate_v3beta1.proto", -) + # translation -> translate + s.replace( + 
"google/**/translation_service_pb2_grpc.py", + f"google.cloud.translation_{version}.proto", + f"google.cloud.translate_{version}.proto", + ) + +# Use the highest version library to generate documentation import alias. +s.move(library / "google/cloud/translate.py") s.replace( "google/cloud/**/translation_service_pb2.py", diff --git a/translate/tests/system.py b/translate/tests/system.py index c586411b6111..b92ead125acf 100644 --- a/translate/tests/system.py +++ b/translate/tests/system.py @@ -16,7 +16,7 @@ import unittest -from google.cloud import translate +from google.cloud import translate_v2 class Config(object): @@ -30,7 +30,7 @@ class Config(object): def setUpModule(): - Config.CLIENT = translate.Client() + Config.CLIENT = translate_v2.Client() class TestTranslate(unittest.TestCase): diff --git a/translate/tests/system/test_vpcsc.py b/translate/tests/system/test_vpcsc.py new file mode 100644 index 000000000000..427d0be6757d --- /dev/null +++ b/translate/tests/system/test_vpcsc.py @@ -0,0 +1,166 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Unit tests for VPC-SC.""" + +import os +import pytest + +from google.api_core import exceptions +from google.cloud import translate_v3beta1 + + +IS_INSIDE_VPCSC = "GOOGLE_CLOUD_TESTS_IN_VPCSC" in os.environ +# If IS_INSIDE_VPCSC is set, these environment variables should also be set +if IS_INSIDE_VPCSC: + PROJECT_INSIDE = os.environ["PROJECT_ID"] + PROJECT_OUTSIDE = os.environ["GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT"] + + +class TestVPCServiceControl(object): + @classmethod + def setup(self): + self._client = translate_v3beta1.TranslationServiceClient() + self._parent_inside = self._client.location_path(PROJECT_INSIDE, "us-central1") + self._parent_outside = self._client.location_path( + PROJECT_OUTSIDE, "us-central1" + ) + + def make_glossary_name(project_id): + return "projects/{0}/locations/us-central1/glossaries/fake_glossary".format( + project_id + ) + + self._glossary_name_inside = make_glossary_name(PROJECT_INSIDE) + self._glossary_name_outside = make_glossary_name(PROJECT_OUTSIDE) + + @staticmethod + def _is_rejected(call): + try: + responses = call() + print("responses: ", responses) + except exceptions.PermissionDenied as e: + print("PermissionDenied Exception: ", e) + return e.message == "Request is prohibited by organization's policy" + except Exception as e: + print("Other Exception: ", e) + pass + return False + + @staticmethod + def _do_test(delayed_inside, delayed_outside): + assert TestVPCServiceControl._is_rejected(delayed_outside) + assert not (TestVPCServiceControl._is_rejected(delayed_inside)) + + @pytest.mark.skipif( + not IS_INSIDE_VPCSC, + reason="This test must be run in VPCSC. 
To enable this test, set the environment variable GOOGLE_CLOUD_TESTS_IN_VPCSC to True", + ) + def test_create_glossary(self): + def make_glossary(project_id): + return { + "name": "projects/{0}/locations/us-central1/glossaries/fake_glossary".format( + project_id + ), + "language_codes_set": {"language_codes": ["en", "ja"]}, + "input_config": { + "gcs_source": {"input_uri": "gs://fake-bucket/fake_glossary.csv"} + }, + } + + glossary_inside = make_glossary(PROJECT_INSIDE) + + def delayed_inside(): + return self._client.create_glossary(self._parent_inside, glossary_inside) + + glossary_outside = make_glossary(PROJECT_OUTSIDE) + + def delayed_outside(): + return self._client.create_glossary(self._parent_outside, glossary_outside) + + TestVPCServiceControl._do_test(delayed_inside, delayed_outside) + + @pytest.mark.skipif( + not IS_INSIDE_VPCSC, + reason="This test must be run in VPCSC. To enable this test, set the environment variable GOOGLE_CLOUD_TESTS_IN_VPCSC to True", + ) + def test_list_glossaries(self): + # list_glossaries() returns an GRPCIterator instance, and we need to actually iterate through it + # by calling _next_page() to get real response. + def delayed_inside(): + return self._client.list_glossaries(self._parent_inside)._next_page() + + def delayed_outside(): + return self._client.list_glossaries(self._parent_outside)._next_page() + + TestVPCServiceControl._do_test(delayed_inside, delayed_outside) + + @pytest.mark.skipif( + not IS_INSIDE_VPCSC, + reason="This test must be run in VPCSC. To enable this test, set the environment variable GOOGLE_CLOUD_TESTS_IN_VPCSC to True", + ) + def test_get_glossary(self): + def delayed_inside(): + return self._client.get_glossary(self._glossary_name_inside) + + def delayed_outside(): + return self._client.get_glossary(self._glossary_name_outside) + + TestVPCServiceControl._do_test(delayed_inside, delayed_outside) + + @pytest.mark.skipif( + not IS_INSIDE_VPCSC, + reason="This test must be run in VPCSC. 
To enable this test, set the environment variable GOOGLE_CLOUD_TESTS_IN_VPCSC to True", + ) + def test_delete_glossary(self): + def delayed_inside(): + return self._client.delete_glossary(self._glossary_name_inside) + + def delayed_outside(): + return self._client.delete_glossary(self._glossary_name_outside) + + TestVPCServiceControl._do_test(delayed_inside, delayed_outside) + + @pytest.mark.skipif( + not IS_INSIDE_VPCSC, + reason="This test must be run in VPCSC. To enable this test, set the environment variable GOOGLE_CLOUD_TESTS_IN_VPCSC to True", + ) + def test_batch_translate_text(self): + source_language_code = "en" + target_language_codes = ["es"] + input_configs = [{"gcs_source": {"input_uri": "gs://fake-bucket/*"}}] + output_config = { + "gcs_destination": {"output_uri_prefix": "gs://fake-bucket/output/"} + } + + def delayed_inside(): + return self._client.batch_translate_text( + self._parent_inside, + source_language_code, + target_language_codes, + input_configs, + output_config, + ) + + def delayed_outside(): + return self._client.batch_translate_text( + self._parent_outside, + source_language_code, + target_language_codes, + input_configs, + output_config, + ) + + TestVPCServiceControl._do_test(delayed_inside, delayed_outside) diff --git a/translate/tests/unit/gapic/v3/test_translation_service_client_v3.py b/translate/tests/unit/gapic/v3/test_translation_service_client_v3.py new file mode 100644 index 000000000000..16180aace4ad --- /dev/null +++ b/translate/tests/unit/gapic/v3/test_translation_service_client_v3.py @@ -0,0 +1,470 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Unit tests.""" + +import mock +import pytest + +from google.rpc import status_pb2 + +from google.cloud import translate_v3 +from google.cloud.translate_v3.proto import translation_service_pb2 +from google.longrunning import operations_pb2 + + +class MultiCallableStub(object): + """Stub for the grpc.UnaryUnaryMultiCallable interface.""" + + def __init__(self, method, channel_stub): + self.method = method + self.channel_stub = channel_stub + + def __call__(self, request, timeout=None, metadata=None, credentials=None): + self.channel_stub.requests.append((self.method, request)) + + response = None + if self.channel_stub.responses: + response = self.channel_stub.responses.pop() + + if isinstance(response, Exception): + raise response + + if response: + return response + + +class ChannelStub(object): + """Stub for the grpc.Channel interface.""" + + def __init__(self, responses=[]): + self.responses = responses + self.requests = [] + + def unary_unary(self, method, request_serializer=None, response_deserializer=None): + return MultiCallableStub(method, self) + + +class CustomException(Exception): + pass + + +class TestTranslationServiceClient(object): + def test_translate_text(self): + # Setup Expected Response + expected_response = {} + expected_response = translation_service_pb2.TranslateTextResponse( + **expected_response + ) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client 
= translate_v3.TranslationServiceClient() + + # Setup Request + contents = [] + target_language_code = "targetLanguageCode1323228230" + parent = client.location_path("[PROJECT]", "[LOCATION]") + + response = client.translate_text(contents, target_language_code, parent) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = translation_service_pb2.TranslateTextRequest( + contents=contents, target_language_code=target_language_code, parent=parent + ) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_translate_text_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = translate_v3.TranslationServiceClient() + + # Setup request + contents = [] + target_language_code = "targetLanguageCode1323228230" + parent = client.location_path("[PROJECT]", "[LOCATION]") + + with pytest.raises(CustomException): + client.translate_text(contents, target_language_code, parent) + + def test_detect_language(self): + # Setup Expected Response + expected_response = {} + expected_response = translation_service_pb2.DetectLanguageResponse( + **expected_response + ) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = translate_v3.TranslationServiceClient() + + # Setup Request + parent = client.location_path("[PROJECT]", "[LOCATION]") + + response = client.detect_language(parent) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = translation_service_pb2.DetectLanguageRequest(parent=parent) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def 
test_detect_language_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = translate_v3.TranslationServiceClient() + + # Setup request + parent = client.location_path("[PROJECT]", "[LOCATION]") + + with pytest.raises(CustomException): + client.detect_language(parent) + + def test_get_supported_languages(self): + # Setup Expected Response + expected_response = {} + expected_response = translation_service_pb2.SupportedLanguages( + **expected_response + ) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = translate_v3.TranslationServiceClient() + + # Setup Request + parent = client.location_path("[PROJECT]", "[LOCATION]") + + response = client.get_supported_languages(parent) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = translation_service_pb2.GetSupportedLanguagesRequest( + parent=parent + ) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_get_supported_languages_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = translate_v3.TranslationServiceClient() + + # Setup request + parent = client.location_path("[PROJECT]", "[LOCATION]") + + with pytest.raises(CustomException): + client.get_supported_languages(parent) + + def test_batch_translate_text(self): + # Setup Expected Response + total_characters = 1368640955 + translated_characters = 1337326221 + failed_characters = 1723028396 + expected_response = { 
+ "total_characters": total_characters, + "translated_characters": translated_characters, + "failed_characters": failed_characters, + } + expected_response = translation_service_pb2.BatchTranslateResponse( + **expected_response + ) + operation = operations_pb2.Operation( + name="operations/test_batch_translate_text", done=True + ) + operation.response.Pack(expected_response) + + # Mock the API response + channel = ChannelStub(responses=[operation]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = translate_v3.TranslationServiceClient() + + # Setup Request + parent = client.location_path("[PROJECT]", "[LOCATION]") + source_language_code = "sourceLanguageCode1687263568" + target_language_codes = [] + input_configs = [] + output_config = {} + + response = client.batch_translate_text( + parent, + source_language_code, + target_language_codes, + input_configs, + output_config, + ) + result = response.result() + assert expected_response == result + + assert len(channel.requests) == 1 + expected_request = translation_service_pb2.BatchTranslateTextRequest( + parent=parent, + source_language_code=source_language_code, + target_language_codes=target_language_codes, + input_configs=input_configs, + output_config=output_config, + ) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_batch_translate_text_exception(self): + # Setup Response + error = status_pb2.Status() + operation = operations_pb2.Operation( + name="operations/test_batch_translate_text_exception", done=True + ) + operation.error.CopyFrom(error) + + # Mock the API response + channel = ChannelStub(responses=[operation]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = translate_v3.TranslationServiceClient() + + # Setup Request + parent = 
client.location_path("[PROJECT]", "[LOCATION]") + source_language_code = "sourceLanguageCode1687263568" + target_language_codes = [] + input_configs = [] + output_config = {} + + response = client.batch_translate_text( + parent, + source_language_code, + target_language_codes, + input_configs, + output_config, + ) + exception = response.exception() + assert exception.errors[0] == error + + def test_create_glossary(self): + # Setup Expected Response + name = "name3373707" + entry_count = 811131134 + expected_response = {"name": name, "entry_count": entry_count} + expected_response = translation_service_pb2.Glossary(**expected_response) + operation = operations_pb2.Operation( + name="operations/test_create_glossary", done=True + ) + operation.response.Pack(expected_response) + + # Mock the API response + channel = ChannelStub(responses=[operation]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = translate_v3.TranslationServiceClient() + + # Setup Request + parent = client.location_path("[PROJECT]", "[LOCATION]") + glossary = {} + + response = client.create_glossary(parent, glossary) + result = response.result() + assert expected_response == result + + assert len(channel.requests) == 1 + expected_request = translation_service_pb2.CreateGlossaryRequest( + parent=parent, glossary=glossary + ) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_create_glossary_exception(self): + # Setup Response + error = status_pb2.Status() + operation = operations_pb2.Operation( + name="operations/test_create_glossary_exception", done=True + ) + operation.error.CopyFrom(error) + + # Mock the API response + channel = ChannelStub(responses=[operation]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = translate_v3.TranslationServiceClient() 
+ + # Setup Request + parent = client.location_path("[PROJECT]", "[LOCATION]") + glossary = {} + + response = client.create_glossary(parent, glossary) + exception = response.exception() + assert exception.errors[0] == error + + def test_list_glossaries(self): + # Setup Expected Response + next_page_token = "" + glossaries_element = {} + glossaries = [glossaries_element] + expected_response = { + "next_page_token": next_page_token, + "glossaries": glossaries, + } + expected_response = translation_service_pb2.ListGlossariesResponse( + **expected_response + ) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = translate_v3.TranslationServiceClient() + + # Setup Request + parent = client.location_path("[PROJECT]", "[LOCATION]") + + paged_list_response = client.list_glossaries(parent) + resources = list(paged_list_response) + assert len(resources) == 1 + + assert expected_response.glossaries[0] == resources[0] + + assert len(channel.requests) == 1 + expected_request = translation_service_pb2.ListGlossariesRequest(parent=parent) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_list_glossaries_exception(self): + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = translate_v3.TranslationServiceClient() + + # Setup request + parent = client.location_path("[PROJECT]", "[LOCATION]") + + paged_list_response = client.list_glossaries(parent) + with pytest.raises(CustomException): + list(paged_list_response) + + def test_get_glossary(self): + # Setup Expected Response + name_2 = "name2-1052831874" + entry_count = 811131134 + expected_response = {"name": name_2, "entry_count": entry_count} + 
expected_response = translation_service_pb2.Glossary(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = translate_v3.TranslationServiceClient() + + # Setup Request + name = client.glossary_path("[PROJECT]", "[LOCATION]", "[GLOSSARY]") + + response = client.get_glossary(name) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = translation_service_pb2.GetGlossaryRequest(name=name) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_get_glossary_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = translate_v3.TranslationServiceClient() + + # Setup request + name = client.glossary_path("[PROJECT]", "[LOCATION]", "[GLOSSARY]") + + with pytest.raises(CustomException): + client.get_glossary(name) + + def test_delete_glossary(self): + # Setup Expected Response + name_2 = "name2-1052831874" + expected_response = {"name": name_2} + expected_response = translation_service_pb2.DeleteGlossaryResponse( + **expected_response + ) + operation = operations_pb2.Operation( + name="operations/test_delete_glossary", done=True + ) + operation.response.Pack(expected_response) + + # Mock the API response + channel = ChannelStub(responses=[operation]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = translate_v3.TranslationServiceClient() + + # Setup Request + name = client.glossary_path("[PROJECT]", "[LOCATION]", "[GLOSSARY]") + + response = client.delete_glossary(name) + result = response.result() + 
assert expected_response == result + + assert len(channel.requests) == 1 + expected_request = translation_service_pb2.DeleteGlossaryRequest(name=name) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_delete_glossary_exception(self): + # Setup Response + error = status_pb2.Status() + operation = operations_pb2.Operation( + name="operations/test_delete_glossary_exception", done=True + ) + operation.error.CopyFrom(error) + + # Mock the API response + channel = ChannelStub(responses=[operation]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = translate_v3.TranslationServiceClient() + + # Setup Request + name = client.glossary_path("[PROJECT]", "[LOCATION]", "[GLOSSARY]") + + response = client.delete_glossary(name) + exception = response.exception() + assert exception.errors[0] == error diff --git a/translate/tests/unit/gapic/v3beta1/test_translation_service_client_v3beta1.py b/translate/tests/unit/gapic/v3beta1/test_translation_service_client_v3beta1.py index c4d45f92c69f..6bd32d82c20a 100644 --- a/translate/tests/unit/gapic/v3beta1/test_translation_service_client_v3beta1.py +++ b/translate/tests/unit/gapic/v3beta1/test_translation_service_client_v3beta1.py @@ -80,13 +80,14 @@ def test_translate_text(self): # Setup Request contents = [] target_language_code = "targetLanguageCode1323228230" + parent = client.location_path("[PROJECT]", "[LOCATION]") - response = client.translate_text(contents, target_language_code) + response = client.translate_text(contents, target_language_code, parent) assert expected_response == response assert len(channel.requests) == 1 expected_request = translation_service_pb2.TranslateTextRequest( - contents=contents, target_language_code=target_language_code + contents=contents, target_language_code=target_language_code, parent=parent ) actual_request = channel.requests[0][1] assert 
expected_request == actual_request @@ -102,9 +103,10 @@ def test_translate_text_exception(self): # Setup request contents = [] target_language_code = "targetLanguageCode1323228230" + parent = client.location_path("[PROJECT]", "[LOCATION]") with pytest.raises(CustomException): - client.translate_text(contents, target_language_code) + client.translate_text(contents, target_language_code, parent) def test_detect_language(self): # Setup Expected Response @@ -120,11 +122,14 @@ def test_detect_language(self): create_channel.return_value = channel client = translate_v3beta1.TranslationServiceClient() - response = client.detect_language() + # Setup Request + parent = client.location_path("[PROJECT]", "[LOCATION]") + + response = client.detect_language(parent) assert expected_response == response assert len(channel.requests) == 1 - expected_request = translation_service_pb2.DetectLanguageRequest() + expected_request = translation_service_pb2.DetectLanguageRequest(parent=parent) actual_request = channel.requests[0][1] assert expected_request == actual_request @@ -136,8 +141,11 @@ def test_detect_language_exception(self): create_channel.return_value = channel client = translate_v3beta1.TranslationServiceClient() + # Setup request + parent = client.location_path("[PROJECT]", "[LOCATION]") + with pytest.raises(CustomException): - client.detect_language() + client.detect_language(parent) def test_get_supported_languages(self): # Setup Expected Response @@ -153,11 +161,16 @@ def test_get_supported_languages(self): create_channel.return_value = channel client = translate_v3beta1.TranslationServiceClient() - response = client.get_supported_languages() + # Setup Request + parent = client.location_path("[PROJECT]", "[LOCATION]") + + response = client.get_supported_languages(parent) assert expected_response == response assert len(channel.requests) == 1 - expected_request = translation_service_pb2.GetSupportedLanguagesRequest() + expected_request = 
translation_service_pb2.GetSupportedLanguagesRequest( + parent=parent + ) actual_request = channel.requests[0][1] assert expected_request == actual_request @@ -169,8 +182,11 @@ def test_get_supported_languages_exception(self): create_channel.return_value = channel client = translate_v3beta1.TranslationServiceClient() + # Setup request + parent = client.location_path("[PROJECT]", "[LOCATION]") + with pytest.raises(CustomException): - client.get_supported_languages() + client.get_supported_languages(parent) def test_batch_translate_text(self): # Setup Expected Response @@ -198,19 +214,25 @@ def test_batch_translate_text(self): client = translate_v3beta1.TranslationServiceClient() # Setup Request + parent = client.location_path("[PROJECT]", "[LOCATION]") source_language_code = "sourceLanguageCode1687263568" target_language_codes = [] input_configs = [] output_config = {} response = client.batch_translate_text( - source_language_code, target_language_codes, input_configs, output_config + parent, + source_language_code, + target_language_codes, + input_configs, + output_config, ) result = response.result() assert expected_response == result assert len(channel.requests) == 1 expected_request = translation_service_pb2.BatchTranslateTextRequest( + parent=parent, source_language_code=source_language_code, target_language_codes=target_language_codes, input_configs=input_configs, @@ -235,13 +257,18 @@ def test_batch_translate_text_exception(self): client = translate_v3beta1.TranslationServiceClient() # Setup Request + parent = client.location_path("[PROJECT]", "[LOCATION]") source_language_code = "sourceLanguageCode1687263568" target_language_codes = [] input_configs = [] output_config = {} response = client.batch_translate_text( - source_language_code, target_language_codes, input_configs, output_config + parent, + source_language_code, + target_language_codes, + input_configs, + output_config, ) exception = response.exception() assert exception.errors[0] == error @@ -322,14 
+349,17 @@ def test_list_glossaries(self): create_channel.return_value = channel client = translate_v3beta1.TranslationServiceClient() - paged_list_response = client.list_glossaries() + # Setup Request + parent = client.location_path("[PROJECT]", "[LOCATION]") + + paged_list_response = client.list_glossaries(parent) resources = list(paged_list_response) assert len(resources) == 1 assert expected_response.glossaries[0] == resources[0] assert len(channel.requests) == 1 - expected_request = translation_service_pb2.ListGlossariesRequest() + expected_request = translation_service_pb2.ListGlossariesRequest(parent=parent) actual_request = channel.requests[0][1] assert expected_request == actual_request @@ -340,7 +370,10 @@ def test_list_glossaries_exception(self): create_channel.return_value = channel client = translate_v3beta1.TranslationServiceClient() - paged_list_response = client.list_glossaries() + # Setup request + parent = client.location_path("[PROJECT]", "[LOCATION]") + + paged_list_response = client.list_glossaries(parent) with pytest.raises(CustomException): list(paged_list_response) diff --git a/translate/tests/unit/test__http.py b/translate/tests/unit/v2/test__http.py similarity index 100% rename from translate/tests/unit/test__http.py rename to translate/tests/unit/v2/test__http.py diff --git a/translate/tests/unit/test_client.py b/translate/tests/unit/v2/test_client.py similarity index 99% rename from translate/tests/unit/test_client.py rename to translate/tests/unit/v2/test_client.py index dfbc5affaa73..2c0f72b1a1e5 100644 --- a/translate/tests/unit/test_client.py +++ b/translate/tests/unit/v2/test_client.py @@ -18,7 +18,7 @@ class TestClient(unittest.TestCase): @staticmethod def _get_target_class(): - from google.cloud.translate import Client + from google.cloud.translate_v2 import Client return Client diff --git a/videointelligence/docs/conf.py b/videointelligence/docs/conf.py index c9ea80a329f8..ceaf0b34c248 100644 --- a/videointelligence/docs/conf.py 
+++ b/videointelligence/docs/conf.py @@ -344,7 +344,7 @@ "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://2.python-requests.org/en/master/", None), + "requests": ("https://requests.kennethreitz.org/en/master/", None), "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } diff --git a/videointelligence/docs/gapic/v1beta1/api.rst b/videointelligence/docs/gapic/v1beta1/api.rst deleted file mode 100644 index 2ec043d68788..000000000000 --- a/videointelligence/docs/gapic/v1beta1/api.rst +++ /dev/null @@ -1,6 +0,0 @@ -Client for Google Cloud Video Intelligence API -============================================== - -.. automodule:: google.cloud.videointelligence_v1beta1 - :members: - :inherited-members: \ No newline at end of file diff --git a/videointelligence/docs/gapic/v1beta1/types.rst b/videointelligence/docs/gapic/v1beta1/types.rst deleted file mode 100644 index 00374b5af4d5..000000000000 --- a/videointelligence/docs/gapic/v1beta1/types.rst +++ /dev/null @@ -1,5 +0,0 @@ -Types for Google Cloud Video Intelligence API Client -==================================================== - -.. 
automodule:: google.cloud.videointelligence_v1beta1.types - :members: \ No newline at end of file diff --git a/videointelligence/docs/index.rst b/videointelligence/docs/index.rst index 868afa75297c..5a888812a870 100644 --- a/videointelligence/docs/index.rst +++ b/videointelligence/docs/index.rst @@ -26,7 +26,7 @@ An API and type reference is provided for this beta: gapic/v1p3beta1/api gapic/v1p3beta1/types -The previous beta releases, spelled ``v1p2beta1``, ``v1p1beta1``, ``v1beta1``, and +The previous beta releases, spelled ``v1p2beta1``, ``v1p1beta1``, and ``v1beta2``, are provided to continue to support code previously written against them. In order to use ththem, you will want to import from e.g. ``google.cloud.videointelligence_v1beta2`` in lieu of @@ -41,8 +41,6 @@ An API and type reference is provided the these betas also: gapic/v1p2beta1/types gapic/v1p1beta1/api gapic/v1p1beta1/types - gapic/v1beta1/api - gapic/v1beta1/types gapic/v1beta2/api gapic/v1beta2/types diff --git a/videointelligence/google/cloud/videointelligence_v1/gapic/video_intelligence_service_client.py b/videointelligence/google/cloud/videointelligence_v1/gapic/video_intelligence_service_client.py index a9058fb38c33..86210ceaad2e 100644 --- a/videointelligence/google/cloud/videointelligence_v1/gapic/video_intelligence_service_client.py +++ b/videointelligence/google/cloud/videointelligence_v1/gapic/video_intelligence_service_client.py @@ -191,9 +191,9 @@ def __init__( # Service calls def annotate_video( self, + features, input_uri=None, input_content=None, - features=None, video_context=None, output_uri=None, location_id=None, @@ -213,11 +213,11 @@ def annotate_video( >>> >>> client = videointelligence_v1.VideoIntelligenceServiceClient() >>> - >>> input_uri = 'gs://cloud-samples-data/video/cat.mp4' >>> features_element = enums.Feature.LABEL_DETECTION >>> features = [features_element] + >>> input_uri = 'gs://cloud-samples-data/video/cat.mp4' >>> - >>> response = 
client.annotate_video(input_uri=input_uri, features=features) + >>> response = client.annotate_video(features, input_uri=input_uri) >>> >>> def callback(operation_future): ... # Handle result. @@ -229,6 +229,7 @@ def annotate_video( >>> metadata = response.metadata() Args: + features (list[~google.cloud.videointelligence_v1.types.Feature]): Required. Requested video annotation features. input_uri (str): Input video location. Currently, only `Google Cloud Storage `__ URIs are supported, which must be specified in the following format: ``gs://bucket-id/object-id`` @@ -241,19 +242,18 @@ def annotate_video( request as ``input_content``. If set, ``input_content`` should be unset. input_content (bytes): The video data bytes. If unset, the input video(s) should be specified via ``input_uri``. If set, ``input_uri`` should be unset. - features (list[~google.cloud.videointelligence_v1.types.Feature]): Requested video annotation features. video_context (Union[dict, ~google.cloud.videointelligence_v1.types.VideoContext]): Additional video context and/or feature-specific parameters. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.videointelligence_v1.types.VideoContext` - output_uri (str): Optional location where the output (in JSON format) should be stored. + output_uri (str): Optional. Location where the output (in JSON format) should be stored. Currently, only `Google Cloud Storage `__ URIs are supported, which must be specified in the following format: ``gs://bucket-id/object-id`` (other URI formats return ``google.rpc.Code.INVALID_ARGUMENT``). For more information, see `Request URIs `__. - location_id (str): Optional cloud region where annotation should take place. Supported + location_id (str): Optional. Cloud region where annotation should take place. Supported cloud regions: ``us-east1``, ``us-west1``, ``europe-west1``, ``asia-east1``. If no region is specified, a region will be determined based on video file location. 
@@ -288,9 +288,9 @@ def annotate_video( ) request = video_intelligence_pb2.AnnotateVideoRequest( + features=features, input_uri=input_uri, input_content=input_content, - features=features, video_context=video_context, output_uri=output_uri, location_id=location_id, diff --git a/videointelligence/google/cloud/videointelligence_v1/gapic/video_intelligence_service_client_config.py b/videointelligence/google/cloud/videointelligence_v1/gapic/video_intelligence_service_client_config.py index b9eef5e2a6a7..70a9b8812235 100644 --- a/videointelligence/google/cloud/videointelligence_v1/gapic/video_intelligence_service_client_config.py +++ b/videointelligence/google/cloud/videointelligence_v1/gapic/video_intelligence_service_client_config.py @@ -7,19 +7,19 @@ }, "retry_params": { "default": { - "initial_retry_delay_millis": 1000, - "retry_delay_multiplier": 2.5, - "max_retry_delay_millis": 120000, - "initial_rpc_timeout_millis": 120000, + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 20000, "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 120000, + "max_rpc_timeout_millis": 20000, "total_timeout_millis": 600000, } }, "methods": { "AnnotateVideo": { - "timeout_millis": 600000, - "retry_codes_name": "idempotent", + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", "retry_params_name": "default", } }, diff --git a/videointelligence/google/cloud/videointelligence_v1/proto/video_intelligence.proto b/videointelligence/google/cloud/videointelligence_v1/proto/video_intelligence.proto index 4c7a0ad131a2..6611e9eb2c61 100644 --- a/videointelligence/google/cloud/videointelligence_v1/proto/video_intelligence.proto +++ b/videointelligence/google/cloud/videointelligence_v1/proto/video_intelligence.proto @@ -19,6 +19,7 @@ package google.cloud.videointelligence.v1; import "google/api/annotations.proto"; import "google/api/client.proto"; +import "google/api/field_behavior.proto"; 
import "google/longrunning/operations.proto"; import "google/protobuf/duration.proto"; import "google/protobuf/timestamp.proto"; @@ -35,19 +36,22 @@ option ruby_package = "Google::Cloud::VideoIntelligence::V1"; // Service that implements Google Cloud Video Intelligence API. service VideoIntelligenceService { option (google.api.default_host) = "videointelligence.googleapis.com"; - option (google.api.oauth_scopes) = - "https://www.googleapis.com/auth/cloud-platform"; + option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; // Performs asynchronous video annotation. Progress and results can be // retrieved through the `google.longrunning.Operations` interface. // `Operation.metadata` contains `AnnotateVideoProgress` (progress). // `Operation.response` contains `AnnotateVideoResponse` (results). - rpc AnnotateVideo(AnnotateVideoRequest) - returns (google.longrunning.Operation) { + rpc AnnotateVideo(AnnotateVideoRequest) returns (google.longrunning.Operation) { option (google.api.http) = { post: "/v1/videos:annotate" body: "*" }; + option (google.api.method_signature) = "input_uri,features"; + option (google.longrunning.operation_info) = { + response_type: "AnnotateVideoResponse" + metadata_type: "AnnotateVideoProgress" + }; } } @@ -57,10 +61,10 @@ message AnnotateVideoRequest { // [Google Cloud Storage](https://cloud.google.com/storage/) URIs are // supported, which must be specified in the following format: // `gs://bucket-id/object-id` (other URI formats return - // [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]). For - // more information, see [Request URIs](/storage/docs/reference-uris). A video - // URI may include wildcards in `object-id`, and thus identify multiple - // videos. Supported wildcards: '*' to match 0 or more characters; + // [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]). For more information, see + // [Request URIs](/storage/docs/reference-uris). 
+ // A video URI may include wildcards in `object-id`, and thus identify + // multiple videos. Supported wildcards: '*' to match 0 or more characters; // '?' to match 1 character. If unset, the input video should be embedded // in the request as `input_content`. If set, `input_content` should be unset. string input_uri = 1; @@ -70,24 +74,24 @@ message AnnotateVideoRequest { // If set, `input_uri` should be unset. bytes input_content = 6; - // Requested video annotation features. - repeated Feature features = 2; + // Required. Requested video annotation features. + repeated Feature features = 2 [(google.api.field_behavior) = REQUIRED]; // Additional video context and/or feature-specific parameters. VideoContext video_context = 3; - // Optional location where the output (in JSON format) should be stored. + // Optional. Location where the output (in JSON format) should be stored. // Currently, only [Google Cloud Storage](https://cloud.google.com/storage/) // URIs are supported, which must be specified in the following format: // `gs://bucket-id/object-id` (other URI formats return - // [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]). For - // more information, see [Request URIs](/storage/docs/reference-uris). - string output_uri = 4; + // [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]). For more information, see + // [Request URIs](/storage/docs/reference-uris). + string output_uri = 4 [(google.api.field_behavior) = OPTIONAL]; - // Optional cloud region where annotation should take place. Supported cloud + // Optional. Cloud region where annotation should take place. Supported cloud // regions: `us-east1`, `us-west1`, `europe-west1`, `asia-east1`. If no region // is specified, a region will be determined based on video file location. - string location_id = 5; + string location_id = 5 [(google.api.field_behavior) = OPTIONAL]; } // Video context and/or feature-specific parameters. 
@@ -119,6 +123,69 @@ message VideoContext { ObjectTrackingConfig object_tracking_config = 13; } +// Video annotation feature. +enum Feature { + // Unspecified. + FEATURE_UNSPECIFIED = 0; + + // Label detection. Detect objects, such as dog or flower. + LABEL_DETECTION = 1; + + // Shot change detection. + SHOT_CHANGE_DETECTION = 2; + + // Explicit content detection. + EXPLICIT_CONTENT_DETECTION = 3; + + // Human face detection and tracking. + FACE_DETECTION = 4; + + // Speech transcription. + SPEECH_TRANSCRIPTION = 6; + + // OCR text detection and tracking. + TEXT_DETECTION = 7; + + // Object detection and tracking. + OBJECT_TRACKING = 9; +} + +// Label detection mode. +enum LabelDetectionMode { + // Unspecified. + LABEL_DETECTION_MODE_UNSPECIFIED = 0; + + // Detect shot-level labels. + SHOT_MODE = 1; + + // Detect frame-level labels. + FRAME_MODE = 2; + + // Detect both shot-level and frame-level labels. + SHOT_AND_FRAME_MODE = 3; +} + +// Bucketized representation of likelihood. +enum Likelihood { + // Unspecified likelihood. + LIKELIHOOD_UNSPECIFIED = 0; + + // Very unlikely. + VERY_UNLIKELY = 1; + + // Unlikely. + UNLIKELY = 2; + + // Possible. + POSSIBLE = 3; + + // Likely. + LIKELY = 4; + + // Very likely. + VERY_LIKELY = 5; +} + // Config for LABEL_DETECTION. message LabelDetectionConfig { // What labels should be detected with LABEL_DETECTION, in addition to @@ -161,9 +228,9 @@ message ShotChangeDetectionConfig { string model = 1; } -// Config for EXPLICIT_CONTENT_DETECTION. -message ExplicitContentDetectionConfig { - // Model to use for explicit content detection. +// Config for OBJECT_TRACKING. +message ObjectTrackingConfig { + // Model to use for object tracking. // Supported values: "builtin/stable" (the default if unset) and // "builtin/latest". string model = 1; @@ -180,9 +247,9 @@ message FaceDetectionConfig { bool include_bounding_boxes = 2; } -// Config for OBJECT_TRACKING. -message ObjectTrackingConfig { - // Model to use for object tracking. 
+// Config for EXPLICIT_CONTENT_DETECTION. +message ExplicitContentDetectionConfig { + // Model to use for explicit content detection. // Supported values: "builtin/stable" (the default if unset) and // "builtin/latest". string model = 1; @@ -344,8 +411,11 @@ message VideoAnnotationResults { repeated LabelAnnotation segment_label_annotations = 2; // Presence label annotations on video level or user specified segment level. - // There is exactly one element for each unique label. This will eventually - // get publicly exposed and the restriction will be removed. + // There is exactly one element for each unique label. Compared to the + // existing topical `segment_label_annotations`, this field presents more + // fine-grained, segment-level labels detected in video content and is made + // available only when the client sets `LabelDetectionConfig.model` to + // "builtin/latest" in the request. repeated LabelAnnotation segment_presence_label_annotations = 23; // Topical label annotations on shot level. @@ -353,8 +423,10 @@ message VideoAnnotationResults { repeated LabelAnnotation shot_label_annotations = 3; // Presence label annotations on shot level. There is exactly one element for - // each unique label. This will eventually get publicly exposed and the - // restriction will be removed. + // each unique label. Compared to the existing topical + // `shot_label_annotations`, this field presents more fine-grained, shot-level + // labels detected in video content and is made available only when the client + // sets `LabelDetectionConfig.model` to "builtin/latest" in the request. repeated LabelAnnotation shot_presence_label_annotations = 24; // Label annotations on frame level. @@ -429,72 +501,71 @@ message AnnotateVideoProgress { // Config for SPEECH_TRANSCRIPTION. message SpeechTranscriptionConfig { - // *Required* The language of the supplied audio as a + // Required. 
*Required* The language of the supplied audio as a // [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tag. // Example: "en-US". // See [Language Support](https://cloud.google.com/speech/docs/languages) // for a list of the currently supported language codes. - string language_code = 1; + string language_code = 1 [(google.api.field_behavior) = REQUIRED]; - // *Optional* Maximum number of recognition hypotheses to be returned. + // Optional. Maximum number of recognition hypotheses to be returned. // Specifically, the maximum number of `SpeechRecognitionAlternative` messages // within each `SpeechTranscription`. The server may return fewer than // `max_alternatives`. Valid values are `0`-`30`. A value of `0` or `1` will // return a maximum of one. If omitted, will return a maximum of one. - int32 max_alternatives = 2; + int32 max_alternatives = 2 [(google.api.field_behavior) = OPTIONAL]; - // *Optional* If set to `true`, the server will attempt to filter out + // Optional. If set to `true`, the server will attempt to filter out // profanities, replacing all but the initial character in each filtered word // with asterisks, e.g. "f***". If set to `false` or omitted, profanities // won't be filtered out. - bool filter_profanity = 3; + bool filter_profanity = 3 [(google.api.field_behavior) = OPTIONAL]; - // *Optional* A means to provide context to assist the speech recognition. - repeated SpeechContext speech_contexts = 4; + // Optional. A means to provide context to assist the speech recognition. + repeated SpeechContext speech_contexts = 4 [(google.api.field_behavior) = OPTIONAL]; - // *Optional* If 'true', adds punctuation to recognition result hypotheses. + // Optional. If 'true', adds punctuation to recognition result hypotheses. // This feature is only available in select languages. Setting this for // requests in other languages has no effect at all. The default 'false' value // does not add punctuation to result hypotheses. 
NOTE: "This is currently // offered as an experimental service, complimentary to all users. In the // future this may be exclusively available as a premium feature." - bool enable_automatic_punctuation = 5; + bool enable_automatic_punctuation = 5 [(google.api.field_behavior) = OPTIONAL]; - // *Optional* For file formats, such as MXF or MKV, supporting multiple audio + // Optional. For file formats, such as MXF or MKV, supporting multiple audio // tracks, specify up to two tracks. Default: track 0. - repeated int32 audio_tracks = 6; + repeated int32 audio_tracks = 6 [(google.api.field_behavior) = OPTIONAL]; - // *Optional* If 'true', enables speaker detection for each recognized word in + // Optional. If 'true', enables speaker detection for each recognized word in // the top alternative of the recognition result using a speaker_tag provided // in the WordInfo. // Note: When this is true, we send all the words from the beginning of the // audio for the top alternative in every consecutive responses. // This is done in order to improve our speaker tags as our models learn to // identify the speakers in the conversation over time. - bool enable_speaker_diarization = 7; + bool enable_speaker_diarization = 7 [(google.api.field_behavior) = OPTIONAL]; - // *Optional* - // If set, specifies the estimated number of speakers in the conversation. + // Optional. If set, specifies the estimated number of speakers in the conversation. // If not set, defaults to '2'. // Ignored unless enable_speaker_diarization is set to true. - int32 diarization_speaker_count = 8; + int32 diarization_speaker_count = 8 [(google.api.field_behavior) = OPTIONAL]; - // *Optional* If `true`, the top result includes a list of words and the + // Optional. If `true`, the top result includes a list of words and the // confidence for those words. If `false`, no word-level confidence // information is returned. The default is `false`. 
- bool enable_word_confidence = 9; + bool enable_word_confidence = 9 [(google.api.field_behavior) = OPTIONAL]; } // Provides "hints" to the speech recognizer to favor specific words and phrases // in the results. message SpeechContext { - // *Optional* A list of strings containing words and phrases "hints" so that + // Optional. A list of strings containing words and phrases "hints" so that // the speech recognition is more likely to recognize them. This can be used // to improve the accuracy for specific words and phrases, for example, if // specific commands are typically spoken by the user. This can also be used // to add additional words to the vocabulary of the recognizer. See // [usage limits](https://cloud.google.com/speech/limits#content). - repeated string phrases = 1; + repeated string phrases = 1 [(google.api.field_behavior) = OPTIONAL]; } // A speech recognition result corresponding to a portion of the audio. @@ -505,11 +576,10 @@ message SpeechTranscription { // ranked by the recognizer. repeated SpeechRecognitionAlternative alternatives = 1; - // Output only. The - // [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tag of the - // language in this result. This language code was detected to have the most - // likelihood of being spoken in the audio. - string language_code = 2; + // Output only. The [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tag of + // the language in this result. This language code was detected to have the + // most likelihood of being spoken in the audio. + string language_code = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; } // Alternative hypotheses (a.k.a. n-best list). @@ -523,12 +593,12 @@ message SpeechRecognitionAlternative { // This field is not guaranteed to be accurate and users should not rely on it // to be always provided. // The default of 0.0 is a sentinel value indicating `confidence` was not set. 
- float confidence = 2; + float confidence = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. A list of word-specific information for each recognized word. // Note: When `enable_speaker_diarization` is true, you will see all the words // from the beginning of the audio. - repeated WordInfo words = 3; + repeated WordInfo words = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; } // Word-specific information for recognized words. Word information is only @@ -556,13 +626,13 @@ message WordInfo { // This field is not guaranteed to be accurate and users should not rely on it // to be always provided. // The default of 0.0 is a sentinel value indicating `confidence` was not set. - float confidence = 4; + float confidence = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. A distinct integer value is assigned for every speaker within // the audio. This field specifies which one of those speakers was detected to // have spoken this word. Value ranges from 1 up to diarization_speaker_count, // and is only set if speaker diarization is enabled. - int32 speaker_tag = 5; + int32 speaker_tag = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; } // A vertex represents a 2D point in the image. @@ -673,66 +743,3 @@ message ObjectTrackingAnnotation { // Streaming mode: it can only be one ObjectTrackingFrame message in frames. repeated ObjectTrackingFrame frames = 2; } - -// Video annotation feature. -enum Feature { - // Unspecified. - FEATURE_UNSPECIFIED = 0; - - // Label detection. Detect objects, such as dog or flower. - LABEL_DETECTION = 1; - - // Shot change detection. - SHOT_CHANGE_DETECTION = 2; - - // Explicit content detection. - EXPLICIT_CONTENT_DETECTION = 3; - - // Human face detection and tracking. - FACE_DETECTION = 4; - - // Speech transcription. - SPEECH_TRANSCRIPTION = 6; - - // OCR text detection and tracking. - TEXT_DETECTION = 7; - - // Object detection and tracking. - OBJECT_TRACKING = 9; -} - -// Label detection mode. 
-enum LabelDetectionMode { - // Unspecified. - LABEL_DETECTION_MODE_UNSPECIFIED = 0; - - // Detect shot-level labels. - SHOT_MODE = 1; - - // Detect frame-level labels. - FRAME_MODE = 2; - - // Detect both shot-level and frame-level labels. - SHOT_AND_FRAME_MODE = 3; -} - -// Bucketized representation of likelihood. -enum Likelihood { - // Unspecified likelihood. - LIKELIHOOD_UNSPECIFIED = 0; - - // Very unlikely. - VERY_UNLIKELY = 1; - - // Unlikely. - UNLIKELY = 2; - - // Possible. - POSSIBLE = 3; - - // Likely. - LIKELY = 4; - - // Very likely. - VERY_LIKELY = 5; -} diff --git a/videointelligence/google/cloud/videointelligence_v1/proto/video_intelligence_pb2.py b/videointelligence/google/cloud/videointelligence_v1/proto/video_intelligence_pb2.py index 755f13437776..869eeae436ce 100644 --- a/videointelligence/google/cloud/videointelligence_v1/proto/video_intelligence_pb2.py +++ b/videointelligence/google/cloud/videointelligence_v1/proto/video_intelligence_pb2.py @@ -18,6 +18,7 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.longrunning import ( operations_pb2 as google_dot_longrunning_dot_operations__pb2, ) @@ -34,11 +35,12 @@ "\n%com.google.cloud.videointelligence.v1B\035VideoIntelligenceServiceProtoP\001ZRgoogle.golang.org/genproto/googleapis/cloud/videointelligence/v1;videointelligence\252\002!Google.Cloud.VideoIntelligence.V1\312\002!Google\\Cloud\\VideoIntelligence\\V1\352\002$Google::Cloud::VideoIntelligence::V1" ), serialized_pb=_b( - 
'\n@google/cloud/videointelligence_v1/proto/video_intelligence.proto\x12!google.cloud.videointelligence.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a#google/longrunning/operations.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto"\xef\x01\n\x14\x41nnotateVideoRequest\x12\x11\n\tinput_uri\x18\x01 \x01(\t\x12\x15\n\rinput_content\x18\x06 \x01(\x0c\x12<\n\x08\x66\x65\x61tures\x18\x02 \x03(\x0e\x32*.google.cloud.videointelligence.v1.Feature\x12\x46\n\rvideo_context\x18\x03 \x01(\x0b\x32/.google.cloud.videointelligence.v1.VideoContext\x12\x12\n\noutput_uri\x18\x04 \x01(\t\x12\x13\n\x0blocation_id\x18\x05 \x01(\t"\xe6\x05\n\x0cVideoContext\x12\x41\n\x08segments\x18\x01 \x03(\x0b\x32/.google.cloud.videointelligence.v1.VideoSegment\x12W\n\x16label_detection_config\x18\x02 \x01(\x0b\x32\x37.google.cloud.videointelligence.v1.LabelDetectionConfig\x12\x62\n\x1cshot_change_detection_config\x18\x03 \x01(\x0b\x32<.google.cloud.videointelligence.v1.ShotChangeDetectionConfig\x12l\n!explicit_content_detection_config\x18\x04 \x01(\x0b\x32\x41.google.cloud.videointelligence.v1.ExplicitContentDetectionConfig\x12U\n\x15\x66\x61\x63\x65_detection_config\x18\x05 \x01(\x0b\x32\x36.google.cloud.videointelligence.v1.FaceDetectionConfig\x12\x61\n\x1bspeech_transcription_config\x18\x06 \x01(\x0b\x32<.google.cloud.videointelligence.v1.SpeechTranscriptionConfig\x12U\n\x15text_detection_config\x18\x08 \x01(\x0b\x32\x36.google.cloud.videointelligence.v1.TextDetectionConfig\x12W\n\x16object_tracking_config\x18\r \x01(\x0b\x32\x37.google.cloud.videointelligence.v1.ObjectTrackingConfig"\xdd\x01\n\x14LabelDetectionConfig\x12S\n\x14label_detection_mode\x18\x01 \x01(\x0e\x32\x35.google.cloud.videointelligence.v1.LabelDetectionMode\x12\x19\n\x11stationary_camera\x18\x02 \x01(\x08\x12\r\n\x05model\x18\x03 \x01(\t\x12"\n\x1a\x66rame_confidence_threshold\x18\x04 \x01(\x02\x12"\n\x1avideo_confidence_threshold\x18\x05 
\x01(\x02"*\n\x19ShotChangeDetectionConfig\x12\r\n\x05model\x18\x01 \x01(\t"/\n\x1e\x45xplicitContentDetectionConfig\x12\r\n\x05model\x18\x01 \x01(\t"D\n\x13\x46\x61\x63\x65\x44\x65tectionConfig\x12\r\n\x05model\x18\x01 \x01(\t\x12\x1e\n\x16include_bounding_boxes\x18\x02 \x01(\x08"%\n\x14ObjectTrackingConfig\x12\r\n\x05model\x18\x01 \x01(\t"<\n\x13TextDetectionConfig\x12\x16\n\x0elanguage_hints\x18\x01 \x03(\t\x12\r\n\x05model\x18\x02 \x01(\t"x\n\x0cVideoSegment\x12\x34\n\x11start_time_offset\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x32\n\x0f\x65nd_time_offset\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration"d\n\x0cLabelSegment\x12@\n\x07segment\x18\x01 \x01(\x0b\x32/.google.cloud.videointelligence.v1.VideoSegment\x12\x12\n\nconfidence\x18\x02 \x01(\x02"P\n\nLabelFrame\x12.\n\x0btime_offset\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x12\n\nconfidence\x18\x02 \x01(\x02"G\n\x06\x45ntity\x12\x11\n\tentity_id\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x15\n\rlanguage_code\x18\x03 \x01(\t"\x94\x02\n\x0fLabelAnnotation\x12\x39\n\x06\x65ntity\x18\x01 \x01(\x0b\x32).google.cloud.videointelligence.v1.Entity\x12\x44\n\x11\x63\x61tegory_entities\x18\x02 \x03(\x0b\x32).google.cloud.videointelligence.v1.Entity\x12\x41\n\x08segments\x18\x03 \x03(\x0b\x32/.google.cloud.videointelligence.v1.LabelSegment\x12=\n\x06\x66rames\x18\x04 \x03(\x0b\x32-.google.cloud.videointelligence.v1.LabelFrame"\x95\x01\n\x14\x45xplicitContentFrame\x12.\n\x0btime_offset\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\x12M\n\x16pornography_likelihood\x18\x02 \x01(\x0e\x32-.google.cloud.videointelligence.v1.Likelihood"d\n\x19\x45xplicitContentAnnotation\x12G\n\x06\x66rames\x18\x01 \x03(\x0b\x32\x37.google.cloud.videointelligence.v1.ExplicitContentFrame"Q\n\x15NormalizedBoundingBox\x12\x0c\n\x04left\x18\x01 \x01(\x02\x12\x0b\n\x03top\x18\x02 \x01(\x02\x12\r\n\x05right\x18\x03 \x01(\x02\x12\x0e\n\x06\x62ottom\x18\x04 
\x01(\x02"O\n\x0b\x46\x61\x63\x65Segment\x12@\n\x07segment\x18\x01 \x01(\x0b\x32/.google.cloud.videointelligence.v1.VideoSegment"\x98\x01\n\tFaceFrame\x12[\n\x19normalized_bounding_boxes\x18\x01 \x03(\x0b\x32\x38.google.cloud.videointelligence.v1.NormalizedBoundingBox\x12.\n\x0btime_offset\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration"\xa3\x01\n\x0e\x46\x61\x63\x65\x41nnotation\x12\x11\n\tthumbnail\x18\x01 \x01(\x0c\x12@\n\x08segments\x18\x02 \x03(\x0b\x32..google.cloud.videointelligence.v1.FaceSegment\x12<\n\x06\x66rames\x18\x03 \x03(\x0b\x32,.google.cloud.videointelligence.v1.FaceFrame"\xbd\x08\n\x16VideoAnnotationResults\x12\x11\n\tinput_uri\x18\x01 \x01(\t\x12@\n\x07segment\x18\n \x01(\x0b\x32/.google.cloud.videointelligence.v1.VideoSegment\x12U\n\x19segment_label_annotations\x18\x02 \x03(\x0b\x32\x32.google.cloud.videointelligence.v1.LabelAnnotation\x12^\n"segment_presence_label_annotations\x18\x17 \x03(\x0b\x32\x32.google.cloud.videointelligence.v1.LabelAnnotation\x12R\n\x16shot_label_annotations\x18\x03 \x03(\x0b\x32\x32.google.cloud.videointelligence.v1.LabelAnnotation\x12[\n\x1fshot_presence_label_annotations\x18\x18 \x03(\x0b\x32\x32.google.cloud.videointelligence.v1.LabelAnnotation\x12S\n\x17\x66rame_label_annotations\x18\x04 \x03(\x0b\x32\x32.google.cloud.videointelligence.v1.LabelAnnotation\x12K\n\x10\x66\x61\x63\x65_annotations\x18\x05 \x03(\x0b\x32\x31.google.cloud.videointelligence.v1.FaceAnnotation\x12I\n\x10shot_annotations\x18\x06 \x03(\x0b\x32/.google.cloud.videointelligence.v1.VideoSegment\x12Y\n\x13\x65xplicit_annotation\x18\x07 \x01(\x0b\x32<.google.cloud.videointelligence.v1.ExplicitContentAnnotation\x12U\n\x15speech_transcriptions\x18\x0b \x03(\x0b\x32\x36.google.cloud.videointelligence.v1.SpeechTranscription\x12K\n\x10text_annotations\x18\x0c \x03(\x0b\x32\x31.google.cloud.videointelligence.v1.TextAnnotation\x12W\n\x12object_annotations\x18\x0e 
\x03(\x0b\x32;.google.cloud.videointelligence.v1.ObjectTrackingAnnotation\x12!\n\x05\x65rror\x18\t \x01(\x0b\x32\x12.google.rpc.Status"n\n\x15\x41nnotateVideoResponse\x12U\n\x12\x61nnotation_results\x18\x01 \x03(\x0b\x32\x39.google.cloud.videointelligence.v1.VideoAnnotationResults"\xa6\x02\n\x17VideoAnnotationProgress\x12\x11\n\tinput_uri\x18\x01 \x01(\t\x12\x18\n\x10progress_percent\x18\x02 \x01(\x05\x12.\n\nstart_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12;\n\x07\x66\x65\x61ture\x18\x05 \x01(\x0e\x32*.google.cloud.videointelligence.v1.Feature\x12@\n\x07segment\x18\x06 \x01(\x0b\x32/.google.cloud.videointelligence.v1.VideoSegment"p\n\x15\x41nnotateVideoProgress\x12W\n\x13\x61nnotation_progress\x18\x01 \x03(\x0b\x32:.google.cloud.videointelligence.v1.VideoAnnotationProgress"\xd4\x02\n\x19SpeechTranscriptionConfig\x12\x15\n\rlanguage_code\x18\x01 \x01(\t\x12\x18\n\x10max_alternatives\x18\x02 \x01(\x05\x12\x18\n\x10\x66ilter_profanity\x18\x03 \x01(\x08\x12I\n\x0fspeech_contexts\x18\x04 \x03(\x0b\x32\x30.google.cloud.videointelligence.v1.SpeechContext\x12$\n\x1c\x65nable_automatic_punctuation\x18\x05 \x01(\x08\x12\x14\n\x0c\x61udio_tracks\x18\x06 \x03(\x05\x12"\n\x1a\x65nable_speaker_diarization\x18\x07 \x01(\x08\x12!\n\x19\x64iarization_speaker_count\x18\x08 \x01(\x05\x12\x1e\n\x16\x65nable_word_confidence\x18\t \x01(\x08" \n\rSpeechContext\x12\x0f\n\x07phrases\x18\x01 \x03(\t"\x83\x01\n\x13SpeechTranscription\x12U\n\x0c\x61lternatives\x18\x01 \x03(\x0b\x32?.google.cloud.videointelligence.v1.SpeechRecognitionAlternative\x12\x15\n\rlanguage_code\x18\x02 \x01(\t"\x82\x01\n\x1cSpeechRecognitionAlternative\x12\x12\n\ntranscript\x18\x01 \x01(\t\x12\x12\n\nconfidence\x18\x02 \x01(\x02\x12:\n\x05words\x18\x03 \x03(\x0b\x32+.google.cloud.videointelligence.v1.WordInfo"\x9d\x01\n\x08WordInfo\x12-\n\nstart_time\x18\x01 
\x01(\x0b\x32\x19.google.protobuf.Duration\x12+\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x0c\n\x04word\x18\x03 \x01(\t\x12\x12\n\nconfidence\x18\x04 \x01(\x02\x12\x13\n\x0bspeaker_tag\x18\x05 \x01(\x05"(\n\x10NormalizedVertex\x12\t\n\x01x\x18\x01 \x01(\x02\x12\t\n\x01y\x18\x02 \x01(\x02"_\n\x16NormalizedBoundingPoly\x12\x45\n\x08vertices\x18\x01 \x03(\x0b\x32\x33.google.cloud.videointelligence.v1.NormalizedVertex"\xa1\x01\n\x0bTextSegment\x12@\n\x07segment\x18\x01 \x01(\x0b\x32/.google.cloud.videointelligence.v1.VideoSegment\x12\x12\n\nconfidence\x18\x02 \x01(\x02\x12<\n\x06\x66rames\x18\x03 \x03(\x0b\x32,.google.cloud.videointelligence.v1.TextFrame"\x94\x01\n\tTextFrame\x12W\n\x14rotated_bounding_box\x18\x01 \x01(\x0b\x32\x39.google.cloud.videointelligence.v1.NormalizedBoundingPoly\x12.\n\x0btime_offset\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration"`\n\x0eTextAnnotation\x12\x0c\n\x04text\x18\x01 \x01(\t\x12@\n\x08segments\x18\x02 \x03(\x0b\x32..google.cloud.videointelligence.v1.TextSegment"\xa0\x01\n\x13ObjectTrackingFrame\x12Y\n\x17normalized_bounding_box\x18\x01 \x01(\x0b\x32\x38.google.cloud.videointelligence.v1.NormalizedBoundingBox\x12.\n\x0btime_offset\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration"\x97\x02\n\x18ObjectTrackingAnnotation\x12\x42\n\x07segment\x18\x03 \x01(\x0b\x32/.google.cloud.videointelligence.v1.VideoSegmentH\x00\x12\x12\n\x08track_id\x18\x05 \x01(\x03H\x00\x12\x39\n\x06\x65ntity\x18\x01 \x01(\x0b\x32).google.cloud.videointelligence.v1.Entity\x12\x12\n\nconfidence\x18\x04 \x01(\x02\x12\x46\n\x06\x66rames\x18\x02 
\x03(\x0b\x32\x36.google.cloud.videointelligence.v1.ObjectTrackingFrameB\x0c\n\ntrack_info*\xc9\x01\n\x07\x46\x65\x61ture\x12\x17\n\x13\x46\x45\x41TURE_UNSPECIFIED\x10\x00\x12\x13\n\x0fLABEL_DETECTION\x10\x01\x12\x19\n\x15SHOT_CHANGE_DETECTION\x10\x02\x12\x1e\n\x1a\x45XPLICIT_CONTENT_DETECTION\x10\x03\x12\x12\n\x0e\x46\x41\x43\x45_DETECTION\x10\x04\x12\x18\n\x14SPEECH_TRANSCRIPTION\x10\x06\x12\x12\n\x0eTEXT_DETECTION\x10\x07\x12\x13\n\x0fOBJECT_TRACKING\x10\t*r\n\x12LabelDetectionMode\x12$\n LABEL_DETECTION_MODE_UNSPECIFIED\x10\x00\x12\r\n\tSHOT_MODE\x10\x01\x12\x0e\n\nFRAME_MODE\x10\x02\x12\x17\n\x13SHOT_AND_FRAME_MODE\x10\x03*t\n\nLikelihood\x12\x1a\n\x16LIKELIHOOD_UNSPECIFIED\x10\x00\x12\x11\n\rVERY_UNLIKELY\x10\x01\x12\x0c\n\x08UNLIKELY\x10\x02\x12\x0c\n\x08POSSIBLE\x10\x03\x12\n\n\x06LIKELY\x10\x04\x12\x0f\n\x0bVERY_LIKELY\x10\x05\x32\xfa\x01\n\x18VideoIntelligenceService\x12\x87\x01\n\rAnnotateVideo\x12\x37.google.cloud.videointelligence.v1.AnnotateVideoRequest\x1a\x1d.google.longrunning.Operation"\x1e\x82\xd3\xe4\x93\x02\x18"\x13/v1/videos:annotate:\x01*\x1aT\xca\x41 videointelligence.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB\x8b\x02\n%com.google.cloud.videointelligence.v1B\x1dVideoIntelligenceServiceProtoP\x01ZRgoogle.golang.org/genproto/googleapis/cloud/videointelligence/v1;videointelligence\xaa\x02!Google.Cloud.VideoIntelligence.V1\xca\x02!Google\\Cloud\\VideoIntelligence\\V1\xea\x02$Google::Cloud::VideoIntelligence::V1b\x06proto3' + '\n@google/cloud/videointelligence_v1/proto/video_intelligence.proto\x12!google.cloud.videointelligence.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a#google/longrunning/operations.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto"\xfe\x01\n\x14\x41nnotateVideoRequest\x12\x11\n\tinput_uri\x18\x01 \x01(\t\x12\x15\n\rinput_content\x18\x06 
\x01(\x0c\x12\x41\n\x08\x66\x65\x61tures\x18\x02 \x03(\x0e\x32*.google.cloud.videointelligence.v1.FeatureB\x03\xe0\x41\x02\x12\x46\n\rvideo_context\x18\x03 \x01(\x0b\x32/.google.cloud.videointelligence.v1.VideoContext\x12\x17\n\noutput_uri\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12\x18\n\x0blocation_id\x18\x05 \x01(\tB\x03\xe0\x41\x01"\xe6\x05\n\x0cVideoContext\x12\x41\n\x08segments\x18\x01 \x03(\x0b\x32/.google.cloud.videointelligence.v1.VideoSegment\x12W\n\x16label_detection_config\x18\x02 \x01(\x0b\x32\x37.google.cloud.videointelligence.v1.LabelDetectionConfig\x12\x62\n\x1cshot_change_detection_config\x18\x03 \x01(\x0b\x32<.google.cloud.videointelligence.v1.ShotChangeDetectionConfig\x12l\n!explicit_content_detection_config\x18\x04 \x01(\x0b\x32\x41.google.cloud.videointelligence.v1.ExplicitContentDetectionConfig\x12U\n\x15\x66\x61\x63\x65_detection_config\x18\x05 \x01(\x0b\x32\x36.google.cloud.videointelligence.v1.FaceDetectionConfig\x12\x61\n\x1bspeech_transcription_config\x18\x06 \x01(\x0b\x32<.google.cloud.videointelligence.v1.SpeechTranscriptionConfig\x12U\n\x15text_detection_config\x18\x08 \x01(\x0b\x32\x36.google.cloud.videointelligence.v1.TextDetectionConfig\x12W\n\x16object_tracking_config\x18\r \x01(\x0b\x32\x37.google.cloud.videointelligence.v1.ObjectTrackingConfig"\xdd\x01\n\x14LabelDetectionConfig\x12S\n\x14label_detection_mode\x18\x01 \x01(\x0e\x32\x35.google.cloud.videointelligence.v1.LabelDetectionMode\x12\x19\n\x11stationary_camera\x18\x02 \x01(\x08\x12\r\n\x05model\x18\x03 \x01(\t\x12"\n\x1a\x66rame_confidence_threshold\x18\x04 \x01(\x02\x12"\n\x1avideo_confidence_threshold\x18\x05 \x01(\x02"*\n\x19ShotChangeDetectionConfig\x12\r\n\x05model\x18\x01 \x01(\t"%\n\x14ObjectTrackingConfig\x12\r\n\x05model\x18\x01 \x01(\t"D\n\x13\x46\x61\x63\x65\x44\x65tectionConfig\x12\r\n\x05model\x18\x01 \x01(\t\x12\x1e\n\x16include_bounding_boxes\x18\x02 \x01(\x08"/\n\x1e\x45xplicitContentDetectionConfig\x12\r\n\x05model\x18\x01 
\x01(\t"<\n\x13TextDetectionConfig\x12\x16\n\x0elanguage_hints\x18\x01 \x03(\t\x12\r\n\x05model\x18\x02 \x01(\t"x\n\x0cVideoSegment\x12\x34\n\x11start_time_offset\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x32\n\x0f\x65nd_time_offset\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration"d\n\x0cLabelSegment\x12@\n\x07segment\x18\x01 \x01(\x0b\x32/.google.cloud.videointelligence.v1.VideoSegment\x12\x12\n\nconfidence\x18\x02 \x01(\x02"P\n\nLabelFrame\x12.\n\x0btime_offset\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x12\n\nconfidence\x18\x02 \x01(\x02"G\n\x06\x45ntity\x12\x11\n\tentity_id\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x15\n\rlanguage_code\x18\x03 \x01(\t"\x94\x02\n\x0fLabelAnnotation\x12\x39\n\x06\x65ntity\x18\x01 \x01(\x0b\x32).google.cloud.videointelligence.v1.Entity\x12\x44\n\x11\x63\x61tegory_entities\x18\x02 \x03(\x0b\x32).google.cloud.videointelligence.v1.Entity\x12\x41\n\x08segments\x18\x03 \x03(\x0b\x32/.google.cloud.videointelligence.v1.LabelSegment\x12=\n\x06\x66rames\x18\x04 \x03(\x0b\x32-.google.cloud.videointelligence.v1.LabelFrame"\x95\x01\n\x14\x45xplicitContentFrame\x12.\n\x0btime_offset\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\x12M\n\x16pornography_likelihood\x18\x02 \x01(\x0e\x32-.google.cloud.videointelligence.v1.Likelihood"d\n\x19\x45xplicitContentAnnotation\x12G\n\x06\x66rames\x18\x01 \x03(\x0b\x32\x37.google.cloud.videointelligence.v1.ExplicitContentFrame"Q\n\x15NormalizedBoundingBox\x12\x0c\n\x04left\x18\x01 \x01(\x02\x12\x0b\n\x03top\x18\x02 \x01(\x02\x12\r\n\x05right\x18\x03 \x01(\x02\x12\x0e\n\x06\x62ottom\x18\x04 \x01(\x02"O\n\x0b\x46\x61\x63\x65Segment\x12@\n\x07segment\x18\x01 \x01(\x0b\x32/.google.cloud.videointelligence.v1.VideoSegment"\x98\x01\n\tFaceFrame\x12[\n\x19normalized_bounding_boxes\x18\x01 \x03(\x0b\x32\x38.google.cloud.videointelligence.v1.NormalizedBoundingBox\x12.\n\x0btime_offset\x18\x02 
\x01(\x0b\x32\x19.google.protobuf.Duration"\xa3\x01\n\x0e\x46\x61\x63\x65\x41nnotation\x12\x11\n\tthumbnail\x18\x01 \x01(\x0c\x12@\n\x08segments\x18\x02 \x03(\x0b\x32..google.cloud.videointelligence.v1.FaceSegment\x12<\n\x06\x66rames\x18\x03 \x03(\x0b\x32,.google.cloud.videointelligence.v1.FaceFrame"\xbd\x08\n\x16VideoAnnotationResults\x12\x11\n\tinput_uri\x18\x01 \x01(\t\x12@\n\x07segment\x18\n \x01(\x0b\x32/.google.cloud.videointelligence.v1.VideoSegment\x12U\n\x19segment_label_annotations\x18\x02 \x03(\x0b\x32\x32.google.cloud.videointelligence.v1.LabelAnnotation\x12^\n"segment_presence_label_annotations\x18\x17 \x03(\x0b\x32\x32.google.cloud.videointelligence.v1.LabelAnnotation\x12R\n\x16shot_label_annotations\x18\x03 \x03(\x0b\x32\x32.google.cloud.videointelligence.v1.LabelAnnotation\x12[\n\x1fshot_presence_label_annotations\x18\x18 \x03(\x0b\x32\x32.google.cloud.videointelligence.v1.LabelAnnotation\x12S\n\x17\x66rame_label_annotations\x18\x04 \x03(\x0b\x32\x32.google.cloud.videointelligence.v1.LabelAnnotation\x12K\n\x10\x66\x61\x63\x65_annotations\x18\x05 \x03(\x0b\x32\x31.google.cloud.videointelligence.v1.FaceAnnotation\x12I\n\x10shot_annotations\x18\x06 \x03(\x0b\x32/.google.cloud.videointelligence.v1.VideoSegment\x12Y\n\x13\x65xplicit_annotation\x18\x07 \x01(\x0b\x32<.google.cloud.videointelligence.v1.ExplicitContentAnnotation\x12U\n\x15speech_transcriptions\x18\x0b \x03(\x0b\x32\x36.google.cloud.videointelligence.v1.SpeechTranscription\x12K\n\x10text_annotations\x18\x0c \x03(\x0b\x32\x31.google.cloud.videointelligence.v1.TextAnnotation\x12W\n\x12object_annotations\x18\x0e \x03(\x0b\x32;.google.cloud.videointelligence.v1.ObjectTrackingAnnotation\x12!\n\x05\x65rror\x18\t \x01(\x0b\x32\x12.google.rpc.Status"n\n\x15\x41nnotateVideoResponse\x12U\n\x12\x61nnotation_results\x18\x01 \x03(\x0b\x32\x39.google.cloud.videointelligence.v1.VideoAnnotationResults"\xa6\x02\n\x17VideoAnnotationProgress\x12\x11\n\tinput_uri\x18\x01 
\x01(\t\x12\x18\n\x10progress_percent\x18\x02 \x01(\x05\x12.\n\nstart_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12;\n\x07\x66\x65\x61ture\x18\x05 \x01(\x0e\x32*.google.cloud.videointelligence.v1.Feature\x12@\n\x07segment\x18\x06 \x01(\x0b\x32/.google.cloud.videointelligence.v1.VideoSegment"p\n\x15\x41nnotateVideoProgress\x12W\n\x13\x61nnotation_progress\x18\x01 \x03(\x0b\x32:.google.cloud.videointelligence.v1.VideoAnnotationProgress"\x81\x03\n\x19SpeechTranscriptionConfig\x12\x1a\n\rlanguage_code\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x1d\n\x10max_alternatives\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01\x12\x1d\n\x10\x66ilter_profanity\x18\x03 \x01(\x08\x42\x03\xe0\x41\x01\x12N\n\x0fspeech_contexts\x18\x04 \x03(\x0b\x32\x30.google.cloud.videointelligence.v1.SpeechContextB\x03\xe0\x41\x01\x12)\n\x1c\x65nable_automatic_punctuation\x18\x05 \x01(\x08\x42\x03\xe0\x41\x01\x12\x19\n\x0c\x61udio_tracks\x18\x06 \x03(\x05\x42\x03\xe0\x41\x01\x12\'\n\x1a\x65nable_speaker_diarization\x18\x07 \x01(\x08\x42\x03\xe0\x41\x01\x12&\n\x19\x64iarization_speaker_count\x18\x08 \x01(\x05\x42\x03\xe0\x41\x01\x12#\n\x16\x65nable_word_confidence\x18\t \x01(\x08\x42\x03\xe0\x41\x01"%\n\rSpeechContext\x12\x14\n\x07phrases\x18\x01 \x03(\tB\x03\xe0\x41\x01"\x88\x01\n\x13SpeechTranscription\x12U\n\x0c\x61lternatives\x18\x01 \x03(\x0b\x32?.google.cloud.videointelligence.v1.SpeechRecognitionAlternative\x12\x1a\n\rlanguage_code\x18\x02 \x01(\tB\x03\xe0\x41\x03"\x8c\x01\n\x1cSpeechRecognitionAlternative\x12\x12\n\ntranscript\x18\x01 \x01(\t\x12\x17\n\nconfidence\x18\x02 \x01(\x02\x42\x03\xe0\x41\x03\x12?\n\x05words\x18\x03 \x03(\x0b\x32+.google.cloud.videointelligence.v1.WordInfoB\x03\xe0\x41\x03"\xa7\x01\n\x08WordInfo\x12-\n\nstart_time\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\x12+\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x0c\n\x04word\x18\x03 
\x01(\t\x12\x17\n\nconfidence\x18\x04 \x01(\x02\x42\x03\xe0\x41\x03\x12\x18\n\x0bspeaker_tag\x18\x05 \x01(\x05\x42\x03\xe0\x41\x03"(\n\x10NormalizedVertex\x12\t\n\x01x\x18\x01 \x01(\x02\x12\t\n\x01y\x18\x02 \x01(\x02"_\n\x16NormalizedBoundingPoly\x12\x45\n\x08vertices\x18\x01 \x03(\x0b\x32\x33.google.cloud.videointelligence.v1.NormalizedVertex"\xa1\x01\n\x0bTextSegment\x12@\n\x07segment\x18\x01 \x01(\x0b\x32/.google.cloud.videointelligence.v1.VideoSegment\x12\x12\n\nconfidence\x18\x02 \x01(\x02\x12<\n\x06\x66rames\x18\x03 \x03(\x0b\x32,.google.cloud.videointelligence.v1.TextFrame"\x94\x01\n\tTextFrame\x12W\n\x14rotated_bounding_box\x18\x01 \x01(\x0b\x32\x39.google.cloud.videointelligence.v1.NormalizedBoundingPoly\x12.\n\x0btime_offset\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration"`\n\x0eTextAnnotation\x12\x0c\n\x04text\x18\x01 \x01(\t\x12@\n\x08segments\x18\x02 \x03(\x0b\x32..google.cloud.videointelligence.v1.TextSegment"\xa0\x01\n\x13ObjectTrackingFrame\x12Y\n\x17normalized_bounding_box\x18\x01 \x01(\x0b\x32\x38.google.cloud.videointelligence.v1.NormalizedBoundingBox\x12.\n\x0btime_offset\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration"\x97\x02\n\x18ObjectTrackingAnnotation\x12\x42\n\x07segment\x18\x03 \x01(\x0b\x32/.google.cloud.videointelligence.v1.VideoSegmentH\x00\x12\x12\n\x08track_id\x18\x05 \x01(\x03H\x00\x12\x39\n\x06\x65ntity\x18\x01 \x01(\x0b\x32).google.cloud.videointelligence.v1.Entity\x12\x12\n\nconfidence\x18\x04 \x01(\x02\x12\x46\n\x06\x66rames\x18\x02 \x03(\x0b\x32\x36.google.cloud.videointelligence.v1.ObjectTrackingFrameB\x0c\n\ntrack_info*\xc9\x01\n\x07\x46\x65\x61ture\x12\x17\n\x13\x46\x45\x41TURE_UNSPECIFIED\x10\x00\x12\x13\n\x0fLABEL_DETECTION\x10\x01\x12\x19\n\x15SHOT_CHANGE_DETECTION\x10\x02\x12\x1e\n\x1a\x45XPLICIT_CONTENT_DETECTION\x10\x03\x12\x12\n\x0e\x46\x41\x43\x45_DETECTION\x10\x04\x12\x18\n\x14SPEECH_TRANSCRIPTION\x10\x06\x12\x12\n\x0eTEXT_DETECTION\x10\x07\x12\x13\n\x0fOBJECT_TRACKING\x10\t*r\n\x12LabelDetectionMode\x12$\n 
LABEL_DETECTION_MODE_UNSPECIFIED\x10\x00\x12\r\n\tSHOT_MODE\x10\x01\x12\x0e\n\nFRAME_MODE\x10\x02\x12\x17\n\x13SHOT_AND_FRAME_MODE\x10\x03*t\n\nLikelihood\x12\x1a\n\x16LIKELIHOOD_UNSPECIFIED\x10\x00\x12\x11\n\rVERY_UNLIKELY\x10\x01\x12\x0c\n\x08UNLIKELY\x10\x02\x12\x0c\n\x08POSSIBLE\x10\x03\x12\n\n\x06LIKELY\x10\x04\x12\x0f\n\x0bVERY_LIKELY\x10\x05\x32\xc0\x02\n\x18VideoIntelligenceService\x12\xcd\x01\n\rAnnotateVideo\x12\x37.google.cloud.videointelligence.v1.AnnotateVideoRequest\x1a\x1d.google.longrunning.Operation"d\x82\xd3\xe4\x93\x02\x18"\x13/v1/videos:annotate:\x01*\xda\x41\x12input_uri,features\xca\x41.\n\x15\x41nnotateVideoResponse\x12\x15\x41nnotateVideoProgress\x1aT\xca\x41 videointelligence.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB\x8b\x02\n%com.google.cloud.videointelligence.v1B\x1dVideoIntelligenceServiceProtoP\x01ZRgoogle.golang.org/genproto/googleapis/cloud/videointelligence/v1;videointelligence\xaa\x02!Google.Cloud.VideoIntelligence.V1\xca\x02!Google\\Cloud\\VideoIntelligence\\V1\xea\x02$Google::Cloud::VideoIntelligence::V1b\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, google_dot_longrunning_dot_operations__pb2.DESCRIPTOR, google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, @@ -103,8 +105,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=6570, - serialized_end=6771, + serialized_start=6693, + serialized_end=6894, ) _sym_db.RegisterEnumDescriptor(_FEATURE) @@ -138,8 +140,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=6773, - serialized_end=6887, + serialized_start=6896, + serialized_end=7010, ) _sym_db.RegisterEnumDescriptor(_LABELDETECTIONMODE) @@ -175,8 +177,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=6889, - serialized_end=7005, + serialized_start=7012, + 
serialized_end=7128, ) _sym_db.RegisterEnumDescriptor(_LIKELIHOOD) @@ -259,7 +261,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -295,7 +297,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -313,7 +315,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -325,8 +327,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=286, - serialized_end=525, + serialized_start=319, + serialized_end=573, ) @@ -490,8 +492,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=528, - serialized_end=1270, + serialized_start=576, + serialized_end=1318, ) @@ -601,8 +603,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1273, - serialized_end=1494, + serialized_start=1321, + serialized_end=1542, ) @@ -640,21 +642,21 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1496, - serialized_end=1538, + serialized_start=1544, + serialized_end=1586, ) -_EXPLICITCONTENTDETECTIONCONFIG = _descriptor.Descriptor( - name="ExplicitContentDetectionConfig", - full_name="google.cloud.videointelligence.v1.ExplicitContentDetectionConfig", +_OBJECTTRACKINGCONFIG = _descriptor.Descriptor( + name="ObjectTrackingConfig", + full_name="google.cloud.videointelligence.v1.ObjectTrackingConfig", filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name="model", - full_name="google.cloud.videointelligence.v1.ExplicitContentDetectionConfig.model", + full_name="google.cloud.videointelligence.v1.ObjectTrackingConfig.model", index=0, number=1, type=9, @@ -679,8 +681,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - 
serialized_start=1540, - serialized_end=1587, + serialized_start=1588, + serialized_end=1625, ) @@ -736,21 +738,21 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1589, - serialized_end=1657, + serialized_start=1627, + serialized_end=1695, ) -_OBJECTTRACKINGCONFIG = _descriptor.Descriptor( - name="ObjectTrackingConfig", - full_name="google.cloud.videointelligence.v1.ObjectTrackingConfig", +_EXPLICITCONTENTDETECTIONCONFIG = _descriptor.Descriptor( + name="ExplicitContentDetectionConfig", + full_name="google.cloud.videointelligence.v1.ExplicitContentDetectionConfig", filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name="model", - full_name="google.cloud.videointelligence.v1.ObjectTrackingConfig.model", + full_name="google.cloud.videointelligence.v1.ExplicitContentDetectionConfig.model", index=0, number=1, type=9, @@ -775,8 +777,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1659, - serialized_end=1696, + serialized_start=1697, + serialized_end=1744, ) @@ -832,8 +834,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1698, - serialized_end=1758, + serialized_start=1746, + serialized_end=1806, ) @@ -889,8 +891,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1760, - serialized_end=1880, + serialized_start=1808, + serialized_end=1928, ) @@ -946,8 +948,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1882, - serialized_end=1982, + serialized_start=1930, + serialized_end=2030, ) @@ -1003,8 +1005,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1984, - serialized_end=2064, + serialized_start=2032, + serialized_end=2112, ) @@ -1078,8 +1080,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2066, - serialized_end=2137, + serialized_start=2114, + serialized_end=2185, ) @@ -1171,8 +1173,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2140, - 
serialized_end=2416, + serialized_start=2188, + serialized_end=2464, ) @@ -1228,8 +1230,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2419, - serialized_end=2568, + serialized_start=2467, + serialized_end=2616, ) @@ -1267,8 +1269,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2570, - serialized_end=2670, + serialized_start=2618, + serialized_end=2718, ) @@ -1360,8 +1362,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2672, - serialized_end=2753, + serialized_start=2720, + serialized_end=2801, ) @@ -1399,8 +1401,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2755, - serialized_end=2834, + serialized_start=2803, + serialized_end=2882, ) @@ -1456,8 +1458,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2837, - serialized_end=2989, + serialized_start=2885, + serialized_end=3037, ) @@ -1531,8 +1533,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2992, - serialized_end=3155, + serialized_start=3040, + serialized_end=3203, ) @@ -1804,8 +1806,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3158, - serialized_end=4243, + serialized_start=3206, + serialized_end=4291, ) @@ -1843,8 +1845,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4245, - serialized_end=4355, + serialized_start=4293, + serialized_end=4403, ) @@ -1972,8 +1974,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4358, - serialized_end=4652, + serialized_start=4406, + serialized_end=4700, ) @@ -2011,8 +2013,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4654, - serialized_end=4766, + serialized_start=4702, + serialized_end=4814, ) @@ -2038,7 +2040,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2056,7 +2058,7 @@ 
containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2074,7 +2076,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2092,7 +2094,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2110,7 +2112,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2128,7 +2130,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2146,7 +2148,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2164,7 +2166,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2182,7 +2184,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -2194,8 +2196,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4769, - serialized_end=5109, + serialized_start=4817, + serialized_end=5202, ) @@ -2221,7 +2223,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ) ], @@ -2233,8 +2235,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5111, - 
serialized_end=5143, + serialized_start=5204, + serialized_end=5241, ) @@ -2278,7 +2280,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -2290,8 +2292,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5146, - serialized_end=5277, + serialized_start=5244, + serialized_end=5380, ) @@ -2335,7 +2337,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2353,7 +2355,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -2365,8 +2367,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5280, - serialized_end=5410, + serialized_start=5383, + serialized_end=5523, ) @@ -2446,7 +2448,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2464,7 +2466,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -2476,8 +2478,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5413, - serialized_end=5570, + serialized_start=5526, + serialized_end=5693, ) @@ -2533,8 +2535,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5572, - serialized_end=5612, + serialized_start=5695, + serialized_end=5735, ) @@ -2572,8 +2574,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5614, - serialized_end=5709, + serialized_start=5737, + serialized_end=5832, ) @@ -2647,8 +2649,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5712, - serialized_end=5873, + serialized_start=5835, + 
serialized_end=5996, ) @@ -2704,8 +2706,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5876, - serialized_end=6024, + serialized_start=5999, + serialized_end=6147, ) @@ -2761,8 +2763,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=6026, - serialized_end=6122, + serialized_start=6149, + serialized_end=6245, ) @@ -2818,8 +2820,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=6125, - serialized_end=6285, + serialized_start=6248, + serialized_end=6408, ) @@ -2937,8 +2939,8 @@ fields=[], ) ], - serialized_start=6288, - serialized_end=6567, + serialized_start=6411, + serialized_end=6690, ) _ANNOTATEVIDEOREQUEST.fields_by_name["features"].enum_type = _FEATURE @@ -3093,11 +3095,11 @@ DESCRIPTOR.message_types_by_name[ "ShotChangeDetectionConfig" ] = _SHOTCHANGEDETECTIONCONFIG +DESCRIPTOR.message_types_by_name["ObjectTrackingConfig"] = _OBJECTTRACKINGCONFIG +DESCRIPTOR.message_types_by_name["FaceDetectionConfig"] = _FACEDETECTIONCONFIG DESCRIPTOR.message_types_by_name[ "ExplicitContentDetectionConfig" ] = _EXPLICITCONTENTDETECTIONCONFIG -DESCRIPTOR.message_types_by_name["FaceDetectionConfig"] = _FACEDETECTIONCONFIG -DESCRIPTOR.message_types_by_name["ObjectTrackingConfig"] = _OBJECTTRACKINGCONFIG DESCRIPTOR.message_types_by_name["TextDetectionConfig"] = _TEXTDETECTIONCONFIG DESCRIPTOR.message_types_by_name["VideoSegment"] = _VIDEOSEGMENT DESCRIPTOR.message_types_by_name["LabelSegment"] = _LABELSEGMENT @@ -3165,11 +3167,11 @@ specified via ``input_uri``. If set, ``input_uri`` should be unset. features: - Requested video annotation features. + Required. Requested video annotation features. video_context: Additional video context and/or feature-specific parameters. output_uri: - Optional location where the output (in JSON format) should be + Optional. Location where the output (in JSON format) should be stored. 
Currently, only `Google Cloud Storage `__ URIs are supported, which must be specified in the following format: @@ -3178,7 +3180,7 @@ ]). For more information, see `Request URIs `__. location_id: - Optional cloud region where annotation should take place. + Optional. Cloud region where annotation should take place. Supported cloud regions: ``us-east1``, ``us-west1``, ``europe- west1``, ``asia-east1``. If no region is specified, a region will be determined based on video file location. @@ -3285,24 +3287,24 @@ ) _sym_db.RegisterMessage(ShotChangeDetectionConfig) -ExplicitContentDetectionConfig = _reflection.GeneratedProtocolMessageType( - "ExplicitContentDetectionConfig", +ObjectTrackingConfig = _reflection.GeneratedProtocolMessageType( + "ObjectTrackingConfig", (_message.Message,), dict( - DESCRIPTOR=_EXPLICITCONTENTDETECTIONCONFIG, + DESCRIPTOR=_OBJECTTRACKINGCONFIG, __module__="google.cloud.videointelligence_v1.proto.video_intelligence_pb2", - __doc__="""Config for EXPLICIT\_CONTENT\_DETECTION. + __doc__="""Config for OBJECT\_TRACKING. Attributes: model: - Model to use for explicit content detection. Supported values: + Model to use for object tracking. Supported values: "builtin/stable" (the default if unset) and "builtin/latest". 
""", - # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1.ExplicitContentDetectionConfig) + # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1.ObjectTrackingConfig) ), ) -_sym_db.RegisterMessage(ExplicitContentDetectionConfig) +_sym_db.RegisterMessage(ObjectTrackingConfig) FaceDetectionConfig = _reflection.GeneratedProtocolMessageType( "FaceDetectionConfig", @@ -3326,24 +3328,24 @@ ) _sym_db.RegisterMessage(FaceDetectionConfig) -ObjectTrackingConfig = _reflection.GeneratedProtocolMessageType( - "ObjectTrackingConfig", +ExplicitContentDetectionConfig = _reflection.GeneratedProtocolMessageType( + "ExplicitContentDetectionConfig", (_message.Message,), dict( - DESCRIPTOR=_OBJECTTRACKINGCONFIG, + DESCRIPTOR=_EXPLICITCONTENTDETECTIONCONFIG, __module__="google.cloud.videointelligence_v1.proto.video_intelligence_pb2", - __doc__="""Config for OBJECT\_TRACKING. + __doc__="""Config for EXPLICIT\_CONTENT\_DETECTION. Attributes: model: - Model to use for object tracking. Supported values: + Model to use for explicit content detection. Supported values: "builtin/stable" (the default if unset) and "builtin/latest". """, - # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1.ObjectTrackingConfig) + # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1.ExplicitContentDetectionConfig) ), ) -_sym_db.RegisterMessage(ObjectTrackingConfig) +_sym_db.RegisterMessage(ExplicitContentDetectionConfig) TextDetectionConfig = _reflection.GeneratedProtocolMessageType( "TextDetectionConfig", @@ -3635,15 +3637,23 @@ segment_presence_label_annotations: Presence label annotations on video level or user specified segment level. There is exactly one element for each unique - label. This will eventually get publicly exposed and the - restriction will be removed. + label. 
Compared to the existing topical + ``segment_label_annotations``, this field presents more fine- + grained, segment-level labels detected in video content and is + made available only when the client sets + ``LabelDetectionConfig.model`` to "builtin/latest" in the + request. shot_label_annotations: Topical label annotations on shot level. There is exactly one element for each unique label. shot_presence_label_annotations: Presence label annotations on shot level. There is exactly one - element for each unique label. This will eventually get - publicly exposed and the restriction will be removed. + element for each unique label. Compared to the existing + topical ``shot_label_annotations``, this field presents more + fine-grained, shot-level labels detected in video content and + is made available only when the client sets + ``LabelDetectionConfig.model`` to "builtin/latest" in the + request. frame_label_annotations: Label annotations on frame level. There is exactly one element for each unique label. @@ -3757,13 +3767,13 @@ Attributes: language_code: - *Required* The language of the supplied audio as a `BCP-47 - `__ language - tag. Example: "en-US". See `Language Support + Required. *Required* The language of the supplied audio as a + `BCP-47 `__ + language tag. Example: "en-US". See `Language Support `__ for a list of the currently supported language codes. max_alternatives: - *Optional* Maximum number of recognition hypotheses to be + Optional. Maximum number of recognition hypotheses to be returned. Specifically, the maximum number of ``SpeechRecognitionAlternative`` messages within each ``SpeechTranscription``. The server may return fewer than @@ -3771,16 +3781,16 @@ of ``0`` or ``1`` will return a maximum of one. If omitted, will return a maximum of one. filter_profanity: - *Optional* If set to ``true``, the server will attempt to + Optional. 
If set to ``true``, the server will attempt to filter out profanities, replacing all but the initial character in each filtered word with asterisks, e.g. "f\*\*\*". If set to ``false`` or omitted, profanities won't be filtered out. speech_contexts: - *Optional* A means to provide context to assist the speech + Optional. A means to provide context to assist the speech recognition. enable_automatic_punctuation: - *Optional* If 'true', adds punctuation to recognition result + Optional. If 'true', adds punctuation to recognition result hypotheses. This feature is only available in select languages. Setting this for requests in other languages has no effect at all. The default 'false' value does not add @@ -3789,11 +3799,11 @@ users. In the future this may be exclusively available as a premium feature." audio_tracks: - *Optional* For file formats, such as MXF or MKV, supporting + Optional. For file formats, such as MXF or MKV, supporting multiple audio tracks, specify up to two tracks. Default: track 0. enable_speaker_diarization: - *Optional* If 'true', enables speaker detection for each + Optional. If 'true', enables speaker detection for each recognized word in the top alternative of the recognition result using a speaker\_tag provided in the WordInfo. Note: When this is true, we send all the words from the beginning of @@ -3802,13 +3812,13 @@ as our models learn to identify the speakers in the conversation over time. diarization_speaker_count: - *Optional* If set, specifies the estimated number of speakers + Optional. If set, specifies the estimated number of speakers in the conversation. If not set, defaults to '2'. Ignored unless enable\_speaker\_diarization is set to true. enable_word_confidence: - *Optional* If ``true``, the top result includes a list of - words and the confidence for those words. If ``false``, no - word-level confidence information is returned. The default is + Optional. 
If ``true``, the top result includes a list of words + and the confidence for those words. If ``false``, no word- + level confidence information is returned. The default is ``false``. """, # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1.SpeechTranscriptionConfig) @@ -3828,7 +3838,7 @@ Attributes: phrases: - *Optional* A list of strings containing words and phrases + Optional. A list of strings containing words and phrases "hints" so that the speech recognition is more likely to recognize them. This can be used to improve the accuracy for specific words and phrases, for example, if specific commands @@ -4124,6 +4134,26 @@ DESCRIPTOR._options = None +_ANNOTATEVIDEOREQUEST.fields_by_name["features"]._options = None +_ANNOTATEVIDEOREQUEST.fields_by_name["output_uri"]._options = None +_ANNOTATEVIDEOREQUEST.fields_by_name["location_id"]._options = None +_SPEECHTRANSCRIPTIONCONFIG.fields_by_name["language_code"]._options = None +_SPEECHTRANSCRIPTIONCONFIG.fields_by_name["max_alternatives"]._options = None +_SPEECHTRANSCRIPTIONCONFIG.fields_by_name["filter_profanity"]._options = None +_SPEECHTRANSCRIPTIONCONFIG.fields_by_name["speech_contexts"]._options = None +_SPEECHTRANSCRIPTIONCONFIG.fields_by_name[ + "enable_automatic_punctuation" +]._options = None +_SPEECHTRANSCRIPTIONCONFIG.fields_by_name["audio_tracks"]._options = None +_SPEECHTRANSCRIPTIONCONFIG.fields_by_name["enable_speaker_diarization"]._options = None +_SPEECHTRANSCRIPTIONCONFIG.fields_by_name["diarization_speaker_count"]._options = None +_SPEECHTRANSCRIPTIONCONFIG.fields_by_name["enable_word_confidence"]._options = None +_SPEECHCONTEXT.fields_by_name["phrases"]._options = None +_SPEECHTRANSCRIPTION.fields_by_name["language_code"]._options = None +_SPEECHRECOGNITIONALTERNATIVE.fields_by_name["confidence"]._options = None +_SPEECHRECOGNITIONALTERNATIVE.fields_by_name["words"]._options = None +_WORDINFO.fields_by_name["confidence"]._options = None 
+_WORDINFO.fields_by_name["speaker_tag"]._options = None _VIDEOINTELLIGENCESERVICE = _descriptor.ServiceDescriptor( name="VideoIntelligenceService", @@ -4133,8 +4163,8 @@ serialized_options=_b( "\312A videointelligence.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" ), - serialized_start=7008, - serialized_end=7258, + serialized_start=7131, + serialized_end=7451, methods=[ _descriptor.MethodDescriptor( name="AnnotateVideo", @@ -4144,7 +4174,7 @@ input_type=_ANNOTATEVIDEOREQUEST, output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, serialized_options=_b( - '\202\323\344\223\002\030"\023/v1/videos:annotate:\001*' + '\202\323\344\223\002\030"\023/v1/videos:annotate:\001*\332A\022input_uri,features\312A.\n\025AnnotateVideoResponse\022\025AnnotateVideoProgress' ), ) ], diff --git a/videointelligence/google/cloud/videointelligence_v1beta1/gapic/enums.py b/videointelligence/google/cloud/videointelligence_v1beta1/gapic/enums.py deleted file mode 100644 index 7a13aca21272..000000000000 --- a/videointelligence/google/cloud/videointelligence_v1beta1/gapic/enums.py +++ /dev/null @@ -1,96 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Wrappers for protocol buffer enum types.""" - -import enum - - -class Feature(enum.IntEnum): - """ - Video annotation feature. - - Attributes: - FEATURE_UNSPECIFIED (int): Unspecified. - LABEL_DETECTION (int): Label detection. 
Detect objects, such as dog or flower. - FACE_DETECTION (int): Human face detection and tracking. - SHOT_CHANGE_DETECTION (int): Shot change detection. - SAFE_SEARCH_DETECTION (int): Safe search detection. - """ - - FEATURE_UNSPECIFIED = 0 - LABEL_DETECTION = 1 - FACE_DETECTION = 2 - SHOT_CHANGE_DETECTION = 3 - SAFE_SEARCH_DETECTION = 4 - - -class LabelDetectionMode(enum.IntEnum): - """ - Label detection mode. - - Attributes: - LABEL_DETECTION_MODE_UNSPECIFIED (int): Unspecified. - SHOT_MODE (int): Detect shot-level labels. - FRAME_MODE (int): Detect frame-level labels. - SHOT_AND_FRAME_MODE (int): Detect both shot-level and frame-level labels. - """ - - LABEL_DETECTION_MODE_UNSPECIFIED = 0 - SHOT_MODE = 1 - FRAME_MODE = 2 - SHOT_AND_FRAME_MODE = 3 - - -class LabelLevel(enum.IntEnum): - """ - Label level (scope). - - Attributes: - LABEL_LEVEL_UNSPECIFIED (int): Unspecified. - VIDEO_LEVEL (int): Video-level. Corresponds to the whole video. - SEGMENT_LEVEL (int): Segment-level. Corresponds to one of ``AnnotateSpec.segments``. - SHOT_LEVEL (int): Shot-level. Corresponds to a single shot (i.e. a series of frames - without a major camera position or background change). - FRAME_LEVEL (int): Frame-level. Corresponds to a single video frame. - """ - - LABEL_LEVEL_UNSPECIFIED = 0 - VIDEO_LEVEL = 1 - SEGMENT_LEVEL = 2 - SHOT_LEVEL = 3 - FRAME_LEVEL = 4 - - -class Likelihood(enum.IntEnum): - """ - Bucketized representation of likelihood. - - Attributes: - UNKNOWN (int): Unknown likelihood. - VERY_UNLIKELY (int): Very unlikely. - UNLIKELY (int): Unlikely. - POSSIBLE (int): Possible. - LIKELY (int): Likely. - VERY_LIKELY (int): Very likely. 
- """ - - UNKNOWN = 0 - VERY_UNLIKELY = 1 - UNLIKELY = 2 - POSSIBLE = 3 - LIKELY = 4 - VERY_LIKELY = 5 diff --git a/videointelligence/google/cloud/videointelligence_v1beta1/proto/video_intelligence.proto b/videointelligence/google/cloud/videointelligence_v1beta1/proto/video_intelligence.proto deleted file mode 100644 index 430776bf0031..000000000000 --- a/videointelligence/google/cloud/videointelligence_v1beta1/proto/video_intelligence.proto +++ /dev/null @@ -1,345 +0,0 @@ -// Copyright 2017 Google Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.cloud.videointelligence.v1beta1; - -import "google/api/annotations.proto"; -import "google/longrunning/operations.proto"; -import "google/protobuf/timestamp.proto"; -import "google/rpc/status.proto"; - -option csharp_namespace = "Google.Cloud.VideoIntelligence.V1Beta1"; -option go_package = "google.golang.org/genproto/googleapis/cloud/videointelligence/v1beta1;videointelligence"; -option java_multiple_files = true; -option java_outer_classname = "VideoIntelligenceServiceProto"; -option java_package = "com.google.cloud.videointelligence.v1beta1"; -option php_namespace = "Google\\Cloud\\VideoIntelligence\\V1beta1"; -option ruby_package = "Google::Cloud::VideoIntelligence::V1beta1"; - -// Service that implements Google Cloud Video Intelligence API. -service VideoIntelligenceService { - // Performs asynchronous video annotation. 
Progress and results can be - // retrieved through the `google.longrunning.Operations` interface. - // `Operation.metadata` contains `AnnotateVideoProgress` (progress). - // `Operation.response` contains `AnnotateVideoResponse` (results). - rpc AnnotateVideo(AnnotateVideoRequest) - returns (google.longrunning.Operation) { - option (google.api.http) = { - post: "/v1beta1/videos:annotate" - body: "*" - }; - } -} - -// Video annotation request. -message AnnotateVideoRequest { - // Input video location. Currently, only - // [Google Cloud Storage](https://cloud.google.com/storage/) URIs are - // supported, which must be specified in the following format: - // `gs://bucket-id/object-id` (other URI formats return - // [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]). For - // more information, see [Request URIs](/storage/docs/reference-uris). A video - // URI may include wildcards in `object-id`, and thus identify multiple - // videos. Supported wildcards: '*' to match 0 or more characters; - // '?' to match 1 character. If unset, the input video should be embedded - // in the request as `input_content`. If set, `input_content` should be unset. - string input_uri = 1; - - // The video data bytes. Encoding: base64. If unset, the input video(s) - // should be specified via `input_uri`. If set, `input_uri` should be unset. - string input_content = 6; - - // Requested video annotation features. - repeated Feature features = 2; - - // Additional video context and/or feature-specific parameters. - VideoContext video_context = 3; - - // Optional location where the output (in JSON format) should be stored. - // Currently, only [Google Cloud Storage](https://cloud.google.com/storage/) - // URIs are supported, which must be specified in the following format: - // `gs://bucket-id/object-id` (other URI formats return - // [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]). 
For - // more information, see [Request URIs](/storage/docs/reference-uris). - string output_uri = 4; - - // Optional cloud region where annotation should take place. Supported cloud - // regions: `us-east1`, `us-west1`, `europe-west1`, `asia-east1`. If no region - // is specified, a region will be determined based on video file location. - string location_id = 5; -} - -// Video context and/or feature-specific parameters. -message VideoContext { - // Video segments to annotate. The segments may overlap and are not required - // to be contiguous or span the whole video. If unspecified, each video - // is treated as a single segment. - repeated VideoSegment segments = 1; - - // If label detection has been requested, what labels should be detected - // in addition to video-level labels or segment-level labels. If unspecified, - // defaults to `SHOT_MODE`. - LabelDetectionMode label_detection_mode = 2; - - // Whether the video has been shot from a stationary (i.e. non-moving) camera. - // When set to true, might improve detection accuracy for moving objects. - bool stationary_camera = 3; - - // Model to use for label detection. - // Supported values: "latest" and "stable" (the default). - string label_detection_model = 4; - - // Model to use for face detection. - // Supported values: "latest" and "stable" (the default). - string face_detection_model = 5; - - // Model to use for shot change detection. - // Supported values: "latest" and "stable" (the default). - string shot_change_detection_model = 6; - - // Model to use for safe search detection. - // Supported values: "latest" and "stable" (the default). - string safe_search_detection_model = 7; -} - -// Video segment. -message VideoSegment { - // Start offset in microseconds (inclusive). Unset means 0. - int64 start_time_offset = 1; - - // End offset in microseconds (inclusive). Unset means 0. - int64 end_time_offset = 2; -} - -// Label location. -message LabelLocation { - // Video segment. 
Set to [-1, -1] for video-level labels. - // Set to [timestamp, timestamp] for frame-level labels. - // Otherwise, corresponds to one of `AnnotateSpec.segments` - // (if specified) or to shot boundaries (if requested). - VideoSegment segment = 1; - - // Confidence that the label is accurate. Range: [0, 1]. - float confidence = 2; - - // Label level. - LabelLevel level = 3; -} - -// Label annotation. -message LabelAnnotation { - // Textual description, e.g. `Fixed-gear bicycle`. - string description = 1; - - // Language code for `description` in BCP-47 format. - string language_code = 2; - - // Where the label was detected and with what confidence. - repeated LabelLocation locations = 3; -} - -// Safe search annotation (based on per-frame visual signals only). -// If no unsafe content has been detected in a frame, no annotations -// are present for that frame. If only some types of unsafe content -// have been detected in a frame, the likelihood is set to `UNKNOWN` -// for all other types of unsafe content. -message SafeSearchAnnotation { - // Likelihood of adult content. - Likelihood adult = 1; - - // Likelihood that an obvious modification was made to the original - // version to make it appear funny or offensive. - Likelihood spoof = 2; - - // Likelihood of medical content. - Likelihood medical = 3; - - // Likelihood of violent content. - Likelihood violent = 4; - - // Likelihood of racy content. - Likelihood racy = 5; - - // Video time offset in microseconds. - int64 time_offset = 6; -} - -// Bounding box. -message BoundingBox { - // Left X coordinate. - int32 left = 1; - - // Right X coordinate. - int32 right = 2; - - // Bottom Y coordinate. - int32 bottom = 3; - - // Top Y coordinate. - int32 top = 4; -} - -// Face location. -message FaceLocation { - // Bounding box in a frame. - BoundingBox bounding_box = 1; - - // Video time offset in microseconds. - int64 time_offset = 2; -} - -// Face annotation. 
-message FaceAnnotation { - // Thumbnail of a representative face view (in JPEG format). Encoding: base64. - string thumbnail = 1; - - // All locations where a face was detected. - // Faces are detected and tracked on a per-video basis - // (as opposed to across multiple videos). - repeated VideoSegment segments = 2; - - // Face locations at one frame per second. - repeated FaceLocation locations = 3; -} - -// Annotation results for a single video. -message VideoAnnotationResults { - // Video file location in - // [Google Cloud Storage](https://cloud.google.com/storage/). - string input_uri = 1; - - // Label annotations. There is exactly one element for each unique label. - repeated LabelAnnotation label_annotations = 2; - - // Face annotations. There is exactly one element for each unique face. - repeated FaceAnnotation face_annotations = 3; - - // Shot annotations. Each shot is represented as a video segment. - repeated VideoSegment shot_annotations = 4; - - // Safe search annotations. - repeated SafeSearchAnnotation safe_search_annotations = 6; - - // If set, indicates an error. Note that for a single `AnnotateVideoRequest` - // some videos may succeed and some may fail. - google.rpc.Status error = 5; -} - -// Video annotation response. Included in the `response` -// field of the `Operation` returned by the `GetOperation` -// call of the `google::longrunning::Operations` service. -message AnnotateVideoResponse { - // Annotation results for all videos specified in `AnnotateVideoRequest`. - repeated VideoAnnotationResults annotation_results = 1; -} - -// Annotation progress for a single video. -message VideoAnnotationProgress { - // Video file location in - // [Google Cloud Storage](https://cloud.google.com/storage/). - string input_uri = 1; - - // Approximate percentage processed thus far. - // Guaranteed to be 100 when fully processed. - int32 progress_percent = 2; - - // Time when the request was received. 
- google.protobuf.Timestamp start_time = 3; - - // Time of the most recent update. - google.protobuf.Timestamp update_time = 4; -} - -// Video annotation progress. Included in the `metadata` -// field of the `Operation` returned by the `GetOperation` -// call of the `google::longrunning::Operations` service. -message AnnotateVideoProgress { - // Progress metadata for all videos specified in `AnnotateVideoRequest`. - repeated VideoAnnotationProgress annotation_progress = 1; -} - -// Video annotation feature. -enum Feature { - // Unspecified. - FEATURE_UNSPECIFIED = 0; - - // Label detection. Detect objects, such as dog or flower. - LABEL_DETECTION = 1; - - // Human face detection and tracking. - FACE_DETECTION = 2; - - // Shot change detection. - SHOT_CHANGE_DETECTION = 3; - - // Safe search detection. - SAFE_SEARCH_DETECTION = 4; -} - -// Label level (scope). -enum LabelLevel { - // Unspecified. - LABEL_LEVEL_UNSPECIFIED = 0; - - // Video-level. Corresponds to the whole video. - VIDEO_LEVEL = 1; - - // Segment-level. Corresponds to one of `AnnotateSpec.segments`. - SEGMENT_LEVEL = 2; - - // Shot-level. Corresponds to a single shot (i.e. a series of frames - // without a major camera position or background change). - SHOT_LEVEL = 3; - - // Frame-level. Corresponds to a single video frame. - FRAME_LEVEL = 4; -} - -// Label detection mode. -enum LabelDetectionMode { - // Unspecified. - LABEL_DETECTION_MODE_UNSPECIFIED = 0; - - // Detect shot-level labels. - SHOT_MODE = 1; - - // Detect frame-level labels. - FRAME_MODE = 2; - - // Detect both shot-level and frame-level labels. - SHOT_AND_FRAME_MODE = 3; -} - -// Bucketized representation of likelihood. -enum Likelihood { - // Unknown likelihood. - UNKNOWN = 0; - - // Very unlikely. - VERY_UNLIKELY = 1; - - // Unlikely. - UNLIKELY = 2; - - // Possible. - POSSIBLE = 3; - - // Likely. - LIKELY = 4; - - // Very likely. 
- VERY_LIKELY = 5; -} diff --git a/videointelligence/google/cloud/videointelligence_v1beta1/proto/video_intelligence_pb2.py b/videointelligence/google/cloud/videointelligence_v1beta1/proto/video_intelligence_pb2.py deleted file mode 100644 index df9be878c05e..000000000000 --- a/videointelligence/google/cloud/videointelligence_v1beta1/proto/video_intelligence_pb2.py +++ /dev/null @@ -1,1800 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/cloud/videointelligence_v1beta1/proto/video_intelligence.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf.internal import enum_type_wrapper -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.longrunning import ( - operations_pb2 as google_dot_longrunning_dot_operations__pb2, -) -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/videointelligence_v1beta1/proto/video_intelligence.proto", - package="google.cloud.videointelligence.v1beta1", - syntax="proto3", - serialized_options=_b( - "\n*com.google.cloud.videointelligence.v1beta1B\035VideoIntelligenceServiceProtoP\001ZWgoogle.golang.org/genproto/googleapis/cloud/videointelligence/v1beta1;videointelligence\252\002&Google.Cloud.VideoIntelligence.V1Beta1\312\002&Google\\Cloud\\VideoIntelligence\\V1beta1\352\002)Google::Cloud::VideoIntelligence::V1beta1" - ), - serialized_pb=_b( - 
'\nEgoogle/cloud/videointelligence_v1beta1/proto/video_intelligence.proto\x12&google.cloud.videointelligence.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a#google/longrunning/operations.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto"\xf9\x01\n\x14\x41nnotateVideoRequest\x12\x11\n\tinput_uri\x18\x01 \x01(\t\x12\x15\n\rinput_content\x18\x06 \x01(\t\x12\x41\n\x08\x66\x65\x61tures\x18\x02 \x03(\x0e\x32/.google.cloud.videointelligence.v1beta1.Feature\x12K\n\rvideo_context\x18\x03 \x01(\x0b\x32\x34.google.cloud.videointelligence.v1beta1.VideoContext\x12\x12\n\noutput_uri\x18\x04 \x01(\t\x12\x13\n\x0blocation_id\x18\x05 \x01(\t"\xd2\x02\n\x0cVideoContext\x12\x46\n\x08segments\x18\x01 \x03(\x0b\x32\x34.google.cloud.videointelligence.v1beta1.VideoSegment\x12X\n\x14label_detection_mode\x18\x02 \x01(\x0e\x32:.google.cloud.videointelligence.v1beta1.LabelDetectionMode\x12\x19\n\x11stationary_camera\x18\x03 \x01(\x08\x12\x1d\n\x15label_detection_model\x18\x04 \x01(\t\x12\x1c\n\x14\x66\x61\x63\x65_detection_model\x18\x05 \x01(\t\x12#\n\x1bshot_change_detection_model\x18\x06 \x01(\t\x12#\n\x1bsafe_search_detection_model\x18\x07 \x01(\t"B\n\x0cVideoSegment\x12\x19\n\x11start_time_offset\x18\x01 \x01(\x03\x12\x17\n\x0f\x65nd_time_offset\x18\x02 \x01(\x03"\xad\x01\n\rLabelLocation\x12\x45\n\x07segment\x18\x01 \x01(\x0b\x32\x34.google.cloud.videointelligence.v1beta1.VideoSegment\x12\x12\n\nconfidence\x18\x02 \x01(\x02\x12\x41\n\x05level\x18\x03 \x01(\x0e\x32\x32.google.cloud.videointelligence.v1beta1.LabelLevel"\x87\x01\n\x0fLabelAnnotation\x12\x13\n\x0b\x64\x65scription\x18\x01 \x01(\t\x12\x15\n\rlanguage_code\x18\x02 \x01(\t\x12H\n\tlocations\x18\x03 \x03(\x0b\x32\x35.google.cloud.videointelligence.v1beta1.LabelLocation"\xfd\x02\n\x14SafeSearchAnnotation\x12\x41\n\x05\x61\x64ult\x18\x01 \x01(\x0e\x32\x32.google.cloud.videointelligence.v1beta1.Likelihood\x12\x41\n\x05spoof\x18\x02 
\x01(\x0e\x32\x32.google.cloud.videointelligence.v1beta1.Likelihood\x12\x43\n\x07medical\x18\x03 \x01(\x0e\x32\x32.google.cloud.videointelligence.v1beta1.Likelihood\x12\x43\n\x07violent\x18\x04 \x01(\x0e\x32\x32.google.cloud.videointelligence.v1beta1.Likelihood\x12@\n\x04racy\x18\x05 \x01(\x0e\x32\x32.google.cloud.videointelligence.v1beta1.Likelihood\x12\x13\n\x0btime_offset\x18\x06 \x01(\x03"G\n\x0b\x42oundingBox\x12\x0c\n\x04left\x18\x01 \x01(\x05\x12\r\n\x05right\x18\x02 \x01(\x05\x12\x0e\n\x06\x62ottom\x18\x03 \x01(\x05\x12\x0b\n\x03top\x18\x04 \x01(\x05"n\n\x0c\x46\x61\x63\x65Location\x12I\n\x0c\x62ounding_box\x18\x01 \x01(\x0b\x32\x33.google.cloud.videointelligence.v1beta1.BoundingBox\x12\x13\n\x0btime_offset\x18\x02 \x01(\x03"\xb4\x01\n\x0e\x46\x61\x63\x65\x41nnotation\x12\x11\n\tthumbnail\x18\x01 \x01(\t\x12\x46\n\x08segments\x18\x02 \x03(\x0b\x32\x34.google.cloud.videointelligence.v1beta1.VideoSegment\x12G\n\tlocations\x18\x03 \x03(\x0b\x32\x34.google.cloud.videointelligence.v1beta1.FaceLocation"\xa3\x03\n\x16VideoAnnotationResults\x12\x11\n\tinput_uri\x18\x01 \x01(\t\x12R\n\x11label_annotations\x18\x02 \x03(\x0b\x32\x37.google.cloud.videointelligence.v1beta1.LabelAnnotation\x12P\n\x10\x66\x61\x63\x65_annotations\x18\x03 \x03(\x0b\x32\x36.google.cloud.videointelligence.v1beta1.FaceAnnotation\x12N\n\x10shot_annotations\x18\x04 \x03(\x0b\x32\x34.google.cloud.videointelligence.v1beta1.VideoSegment\x12]\n\x17safe_search_annotations\x18\x06 \x03(\x0b\x32<.google.cloud.videointelligence.v1beta1.SafeSearchAnnotation\x12!\n\x05\x65rror\x18\x05 \x01(\x0b\x32\x12.google.rpc.Status"s\n\x15\x41nnotateVideoResponse\x12Z\n\x12\x61nnotation_results\x18\x01 \x03(\x0b\x32>.google.cloud.videointelligence.v1beta1.VideoAnnotationResults"\xa7\x01\n\x17VideoAnnotationProgress\x12\x11\n\tinput_uri\x18\x01 \x01(\t\x12\x18\n\x10progress_percent\x18\x02 \x01(\x05\x12.\n\nstart_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x04 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp"u\n\x15\x41nnotateVideoProgress\x12\\\n\x13\x61nnotation_progress\x18\x01 \x03(\x0b\x32?.google.cloud.videointelligence.v1beta1.VideoAnnotationProgress*\x81\x01\n\x07\x46\x65\x61ture\x12\x17\n\x13\x46\x45\x41TURE_UNSPECIFIED\x10\x00\x12\x13\n\x0fLABEL_DETECTION\x10\x01\x12\x12\n\x0e\x46\x41\x43\x45_DETECTION\x10\x02\x12\x19\n\x15SHOT_CHANGE_DETECTION\x10\x03\x12\x19\n\x15SAFE_SEARCH_DETECTION\x10\x04*n\n\nLabelLevel\x12\x1b\n\x17LABEL_LEVEL_UNSPECIFIED\x10\x00\x12\x0f\n\x0bVIDEO_LEVEL\x10\x01\x12\x11\n\rSEGMENT_LEVEL\x10\x02\x12\x0e\n\nSHOT_LEVEL\x10\x03\x12\x0f\n\x0b\x46RAME_LEVEL\x10\x04*r\n\x12LabelDetectionMode\x12$\n LABEL_DETECTION_MODE_UNSPECIFIED\x10\x00\x12\r\n\tSHOT_MODE\x10\x01\x12\x0e\n\nFRAME_MODE\x10\x02\x12\x17\n\x13SHOT_AND_FRAME_MODE\x10\x03*e\n\nLikelihood\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x11\n\rVERY_UNLIKELY\x10\x01\x12\x0c\n\x08UNLIKELY\x10\x02\x12\x0c\n\x08POSSIBLE\x10\x03\x12\n\n\x06LIKELY\x10\x04\x12\x0f\n\x0bVERY_LIKELY\x10\x05\x32\xae\x01\n\x18VideoIntelligenceService\x12\x91\x01\n\rAnnotateVideo\x12<.google.cloud.videointelligence.v1beta1.AnnotateVideoRequest\x1a\x1d.google.longrunning.Operation"#\x82\xd3\xe4\x93\x02\x1d"\x18/v1beta1/videos:annotate:\x01*B\xa4\x02\n*com.google.cloud.videointelligence.v1beta1B\x1dVideoIntelligenceServiceProtoP\x01ZWgoogle.golang.org/genproto/googleapis/cloud/videointelligence/v1beta1;videointelligence\xaa\x02&Google.Cloud.VideoIntelligence.V1Beta1\xca\x02&Google\\Cloud\\VideoIntelligence\\V1beta1\xea\x02)Google::Cloud::VideoIntelligence::V1beta1b\x06proto3' - ), - dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_longrunning_dot_operations__pb2.DESCRIPTOR, - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - google_dot_rpc_dot_status__pb2.DESCRIPTOR, - ], -) - -_FEATURE = _descriptor.EnumDescriptor( - name="Feature", - full_name="google.cloud.videointelligence.v1beta1.Feature", - filename=None, - file=DESCRIPTOR, - values=[ - 
_descriptor.EnumValueDescriptor( - name="FEATURE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="LABEL_DETECTION", - index=1, - number=1, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="FACE_DETECTION", index=2, number=2, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="SHOT_CHANGE_DETECTION", - index=3, - number=3, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="SAFE_SEARCH_DETECTION", - index=4, - number=4, - serialized_options=None, - type=None, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=2794, - serialized_end=2923, -) -_sym_db.RegisterEnumDescriptor(_FEATURE) - -Feature = enum_type_wrapper.EnumTypeWrapper(_FEATURE) -_LABELLEVEL = _descriptor.EnumDescriptor( - name="LabelLevel", - full_name="google.cloud.videointelligence.v1beta1.LabelLevel", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="LABEL_LEVEL_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="VIDEO_LEVEL", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="SEGMENT_LEVEL", index=2, number=2, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="SHOT_LEVEL", index=3, number=3, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="FRAME_LEVEL", index=4, number=4, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=2925, - serialized_end=3035, -) -_sym_db.RegisterEnumDescriptor(_LABELLEVEL) - -LabelLevel = enum_type_wrapper.EnumTypeWrapper(_LABELLEVEL) -_LABELDETECTIONMODE = _descriptor.EnumDescriptor( - name="LabelDetectionMode", - 
full_name="google.cloud.videointelligence.v1beta1.LabelDetectionMode", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="LABEL_DETECTION_MODE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="SHOT_MODE", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="FRAME_MODE", index=2, number=2, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="SHOT_AND_FRAME_MODE", - index=3, - number=3, - serialized_options=None, - type=None, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=3037, - serialized_end=3151, -) -_sym_db.RegisterEnumDescriptor(_LABELDETECTIONMODE) - -LabelDetectionMode = enum_type_wrapper.EnumTypeWrapper(_LABELDETECTIONMODE) -_LIKELIHOOD = _descriptor.EnumDescriptor( - name="Likelihood", - full_name="google.cloud.videointelligence.v1beta1.Likelihood", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="UNKNOWN", index=0, number=0, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="VERY_UNLIKELY", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="UNLIKELY", index=2, number=2, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="POSSIBLE", index=3, number=3, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="LIKELY", index=4, number=4, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="VERY_LIKELY", index=5, number=5, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=3153, - serialized_end=3254, -) -_sym_db.RegisterEnumDescriptor(_LIKELIHOOD) - -Likelihood = enum_type_wrapper.EnumTypeWrapper(_LIKELIHOOD) -FEATURE_UNSPECIFIED = 0 -LABEL_DETECTION = 1 
-FACE_DETECTION = 2 -SHOT_CHANGE_DETECTION = 3 -SAFE_SEARCH_DETECTION = 4 -LABEL_LEVEL_UNSPECIFIED = 0 -VIDEO_LEVEL = 1 -SEGMENT_LEVEL = 2 -SHOT_LEVEL = 3 -FRAME_LEVEL = 4 -LABEL_DETECTION_MODE_UNSPECIFIED = 0 -SHOT_MODE = 1 -FRAME_MODE = 2 -SHOT_AND_FRAME_MODE = 3 -UNKNOWN = 0 -VERY_UNLIKELY = 1 -UNLIKELY = 2 -POSSIBLE = 3 -LIKELY = 4 -VERY_LIKELY = 5 - - -_ANNOTATEVIDEOREQUEST = _descriptor.Descriptor( - name="AnnotateVideoRequest", - full_name="google.cloud.videointelligence.v1beta1.AnnotateVideoRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="input_uri", - full_name="google.cloud.videointelligence.v1beta1.AnnotateVideoRequest.input_uri", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="input_content", - full_name="google.cloud.videointelligence.v1beta1.AnnotateVideoRequest.input_content", - index=1, - number=6, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="features", - full_name="google.cloud.videointelligence.v1beta1.AnnotateVideoRequest.features", - index=2, - number=2, - type=14, - cpp_type=8, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="video_context", - full_name="google.cloud.videointelligence.v1beta1.AnnotateVideoRequest.video_context", - index=3, - 
number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="output_uri", - full_name="google.cloud.videointelligence.v1beta1.AnnotateVideoRequest.output_uri", - index=4, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="location_id", - full_name="google.cloud.videointelligence.v1beta1.AnnotateVideoRequest.location_id", - index=5, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=239, - serialized_end=488, -) - - -_VIDEOCONTEXT = _descriptor.Descriptor( - name="VideoContext", - full_name="google.cloud.videointelligence.v1beta1.VideoContext", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="segments", - full_name="google.cloud.videointelligence.v1beta1.VideoContext.segments", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="label_detection_mode", - 
full_name="google.cloud.videointelligence.v1beta1.VideoContext.label_detection_mode", - index=1, - number=2, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="stationary_camera", - full_name="google.cloud.videointelligence.v1beta1.VideoContext.stationary_camera", - index=2, - number=3, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="label_detection_model", - full_name="google.cloud.videointelligence.v1beta1.VideoContext.label_detection_model", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="face_detection_model", - full_name="google.cloud.videointelligence.v1beta1.VideoContext.face_detection_model", - index=4, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="shot_change_detection_model", - full_name="google.cloud.videointelligence.v1beta1.VideoContext.shot_change_detection_model", - index=5, - number=6, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - 
is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="safe_search_detection_model", - full_name="google.cloud.videointelligence.v1beta1.VideoContext.safe_search_detection_model", - index=6, - number=7, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=491, - serialized_end=829, -) - - -_VIDEOSEGMENT = _descriptor.Descriptor( - name="VideoSegment", - full_name="google.cloud.videointelligence.v1beta1.VideoSegment", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="start_time_offset", - full_name="google.cloud.videointelligence.v1beta1.VideoSegment.start_time_offset", - index=0, - number=1, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="end_time_offset", - full_name="google.cloud.videointelligence.v1beta1.VideoSegment.end_time_offset", - index=1, - number=2, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=831, - serialized_end=897, -) - - 
-_LABELLOCATION = _descriptor.Descriptor( - name="LabelLocation", - full_name="google.cloud.videointelligence.v1beta1.LabelLocation", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="segment", - full_name="google.cloud.videointelligence.v1beta1.LabelLocation.segment", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="confidence", - full_name="google.cloud.videointelligence.v1beta1.LabelLocation.confidence", - index=1, - number=2, - type=2, - cpp_type=6, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="level", - full_name="google.cloud.videointelligence.v1beta1.LabelLocation.level", - index=2, - number=3, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=900, - serialized_end=1073, -) - - -_LABELANNOTATION = _descriptor.Descriptor( - name="LabelAnnotation", - full_name="google.cloud.videointelligence.v1beta1.LabelAnnotation", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="description", - full_name="google.cloud.videointelligence.v1beta1.LabelAnnotation.description", - index=0, - number=1, - type=9, - cpp_type=9, - 
label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="language_code", - full_name="google.cloud.videointelligence.v1beta1.LabelAnnotation.language_code", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="locations", - full_name="google.cloud.videointelligence.v1beta1.LabelAnnotation.locations", - index=2, - number=3, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1076, - serialized_end=1211, -) - - -_SAFESEARCHANNOTATION = _descriptor.Descriptor( - name="SafeSearchAnnotation", - full_name="google.cloud.videointelligence.v1beta1.SafeSearchAnnotation", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="adult", - full_name="google.cloud.videointelligence.v1beta1.SafeSearchAnnotation.adult", - index=0, - number=1, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="spoof", - 
full_name="google.cloud.videointelligence.v1beta1.SafeSearchAnnotation.spoof", - index=1, - number=2, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="medical", - full_name="google.cloud.videointelligence.v1beta1.SafeSearchAnnotation.medical", - index=2, - number=3, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="violent", - full_name="google.cloud.videointelligence.v1beta1.SafeSearchAnnotation.violent", - index=3, - number=4, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="racy", - full_name="google.cloud.videointelligence.v1beta1.SafeSearchAnnotation.racy", - index=4, - number=5, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="time_offset", - full_name="google.cloud.videointelligence.v1beta1.SafeSearchAnnotation.time_offset", - index=5, - number=6, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - 
serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1214, - serialized_end=1595, -) - - -_BOUNDINGBOX = _descriptor.Descriptor( - name="BoundingBox", - full_name="google.cloud.videointelligence.v1beta1.BoundingBox", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="left", - full_name="google.cloud.videointelligence.v1beta1.BoundingBox.left", - index=0, - number=1, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="right", - full_name="google.cloud.videointelligence.v1beta1.BoundingBox.right", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="bottom", - full_name="google.cloud.videointelligence.v1beta1.BoundingBox.bottom", - index=2, - number=3, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="top", - full_name="google.cloud.videointelligence.v1beta1.BoundingBox.top", - index=3, - number=4, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - 
syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1597, - serialized_end=1668, -) - - -_FACELOCATION = _descriptor.Descriptor( - name="FaceLocation", - full_name="google.cloud.videointelligence.v1beta1.FaceLocation", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="bounding_box", - full_name="google.cloud.videointelligence.v1beta1.FaceLocation.bounding_box", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="time_offset", - full_name="google.cloud.videointelligence.v1beta1.FaceLocation.time_offset", - index=1, - number=2, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1670, - serialized_end=1780, -) - - -_FACEANNOTATION = _descriptor.Descriptor( - name="FaceAnnotation", - full_name="google.cloud.videointelligence.v1beta1.FaceAnnotation", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="thumbnail", - full_name="google.cloud.videointelligence.v1beta1.FaceAnnotation.thumbnail", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="segments", 
- full_name="google.cloud.videointelligence.v1beta1.FaceAnnotation.segments", - index=1, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="locations", - full_name="google.cloud.videointelligence.v1beta1.FaceAnnotation.locations", - index=2, - number=3, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1783, - serialized_end=1963, -) - - -_VIDEOANNOTATIONRESULTS = _descriptor.Descriptor( - name="VideoAnnotationResults", - full_name="google.cloud.videointelligence.v1beta1.VideoAnnotationResults", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="input_uri", - full_name="google.cloud.videointelligence.v1beta1.VideoAnnotationResults.input_uri", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="label_annotations", - full_name="google.cloud.videointelligence.v1beta1.VideoAnnotationResults.label_annotations", - index=1, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - 
serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="face_annotations", - full_name="google.cloud.videointelligence.v1beta1.VideoAnnotationResults.face_annotations", - index=2, - number=3, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="shot_annotations", - full_name="google.cloud.videointelligence.v1beta1.VideoAnnotationResults.shot_annotations", - index=3, - number=4, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="safe_search_annotations", - full_name="google.cloud.videointelligence.v1beta1.VideoAnnotationResults.safe_search_annotations", - index=4, - number=6, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="error", - full_name="google.cloud.videointelligence.v1beta1.VideoAnnotationResults.error", - index=5, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1966, - serialized_end=2385, -) - - -_ANNOTATEVIDEORESPONSE = _descriptor.Descriptor( - 
name="AnnotateVideoResponse", - full_name="google.cloud.videointelligence.v1beta1.AnnotateVideoResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="annotation_results", - full_name="google.cloud.videointelligence.v1beta1.AnnotateVideoResponse.annotation_results", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2387, - serialized_end=2502, -) - - -_VIDEOANNOTATIONPROGRESS = _descriptor.Descriptor( - name="VideoAnnotationProgress", - full_name="google.cloud.videointelligence.v1beta1.VideoAnnotationProgress", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="input_uri", - full_name="google.cloud.videointelligence.v1beta1.VideoAnnotationProgress.input_uri", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="progress_percent", - full_name="google.cloud.videointelligence.v1beta1.VideoAnnotationProgress.progress_percent", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="start_time", - 
full_name="google.cloud.videointelligence.v1beta1.VideoAnnotationProgress.start_time", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="update_time", - full_name="google.cloud.videointelligence.v1beta1.VideoAnnotationProgress.update_time", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2505, - serialized_end=2672, -) - - -_ANNOTATEVIDEOPROGRESS = _descriptor.Descriptor( - name="AnnotateVideoProgress", - full_name="google.cloud.videointelligence.v1beta1.AnnotateVideoProgress", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="annotation_progress", - full_name="google.cloud.videointelligence.v1beta1.AnnotateVideoProgress.annotation_progress", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2674, - serialized_end=2791, -) - -_ANNOTATEVIDEOREQUEST.fields_by_name["features"].enum_type = _FEATURE -_ANNOTATEVIDEOREQUEST.fields_by_name["video_context"].message_type = 
_VIDEOCONTEXT -_VIDEOCONTEXT.fields_by_name["segments"].message_type = _VIDEOSEGMENT -_VIDEOCONTEXT.fields_by_name["label_detection_mode"].enum_type = _LABELDETECTIONMODE -_LABELLOCATION.fields_by_name["segment"].message_type = _VIDEOSEGMENT -_LABELLOCATION.fields_by_name["level"].enum_type = _LABELLEVEL -_LABELANNOTATION.fields_by_name["locations"].message_type = _LABELLOCATION -_SAFESEARCHANNOTATION.fields_by_name["adult"].enum_type = _LIKELIHOOD -_SAFESEARCHANNOTATION.fields_by_name["spoof"].enum_type = _LIKELIHOOD -_SAFESEARCHANNOTATION.fields_by_name["medical"].enum_type = _LIKELIHOOD -_SAFESEARCHANNOTATION.fields_by_name["violent"].enum_type = _LIKELIHOOD -_SAFESEARCHANNOTATION.fields_by_name["racy"].enum_type = _LIKELIHOOD -_FACELOCATION.fields_by_name["bounding_box"].message_type = _BOUNDINGBOX -_FACEANNOTATION.fields_by_name["segments"].message_type = _VIDEOSEGMENT -_FACEANNOTATION.fields_by_name["locations"].message_type = _FACELOCATION -_VIDEOANNOTATIONRESULTS.fields_by_name[ - "label_annotations" -].message_type = _LABELANNOTATION -_VIDEOANNOTATIONRESULTS.fields_by_name[ - "face_annotations" -].message_type = _FACEANNOTATION -_VIDEOANNOTATIONRESULTS.fields_by_name["shot_annotations"].message_type = _VIDEOSEGMENT -_VIDEOANNOTATIONRESULTS.fields_by_name[ - "safe_search_annotations" -].message_type = _SAFESEARCHANNOTATION -_VIDEOANNOTATIONRESULTS.fields_by_name[ - "error" -].message_type = google_dot_rpc_dot_status__pb2._STATUS -_ANNOTATEVIDEORESPONSE.fields_by_name[ - "annotation_results" -].message_type = _VIDEOANNOTATIONRESULTS -_VIDEOANNOTATIONPROGRESS.fields_by_name[ - "start_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_VIDEOANNOTATIONPROGRESS.fields_by_name[ - "update_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_ANNOTATEVIDEOPROGRESS.fields_by_name[ - "annotation_progress" -].message_type = _VIDEOANNOTATIONPROGRESS -DESCRIPTOR.message_types_by_name["AnnotateVideoRequest"] = 
_ANNOTATEVIDEOREQUEST -DESCRIPTOR.message_types_by_name["VideoContext"] = _VIDEOCONTEXT -DESCRIPTOR.message_types_by_name["VideoSegment"] = _VIDEOSEGMENT -DESCRIPTOR.message_types_by_name["LabelLocation"] = _LABELLOCATION -DESCRIPTOR.message_types_by_name["LabelAnnotation"] = _LABELANNOTATION -DESCRIPTOR.message_types_by_name["SafeSearchAnnotation"] = _SAFESEARCHANNOTATION -DESCRIPTOR.message_types_by_name["BoundingBox"] = _BOUNDINGBOX -DESCRIPTOR.message_types_by_name["FaceLocation"] = _FACELOCATION -DESCRIPTOR.message_types_by_name["FaceAnnotation"] = _FACEANNOTATION -DESCRIPTOR.message_types_by_name["VideoAnnotationResults"] = _VIDEOANNOTATIONRESULTS -DESCRIPTOR.message_types_by_name["AnnotateVideoResponse"] = _ANNOTATEVIDEORESPONSE -DESCRIPTOR.message_types_by_name["VideoAnnotationProgress"] = _VIDEOANNOTATIONPROGRESS -DESCRIPTOR.message_types_by_name["AnnotateVideoProgress"] = _ANNOTATEVIDEOPROGRESS -DESCRIPTOR.enum_types_by_name["Feature"] = _FEATURE -DESCRIPTOR.enum_types_by_name["LabelLevel"] = _LABELLEVEL -DESCRIPTOR.enum_types_by_name["LabelDetectionMode"] = _LABELDETECTIONMODE -DESCRIPTOR.enum_types_by_name["Likelihood"] = _LIKELIHOOD -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -AnnotateVideoRequest = _reflection.GeneratedProtocolMessageType( - "AnnotateVideoRequest", - (_message.Message,), - dict( - DESCRIPTOR=_ANNOTATEVIDEOREQUEST, - __module__="google.cloud.videointelligence_v1beta1.proto.video_intelligence_pb2", - __doc__="""Video annotation request. - - - Attributes: - input_uri: - Input video location. Currently, only `Google Cloud Storage - `__ URIs are supported, - which must be specified in the following format: - ``gs://bucket-id/object-id`` (other URI formats return [google - .rpc.Code.INVALID\_ARGUMENT][google.rpc.Code.INVALID\_ARGUMENT - ]). For more information, see `Request URIs - `__. A video URI may include - wildcards in ``object-id``, and thus identify multiple videos. - Supported wildcards: '\*' to match 0 or more characters; '?' 
- to match 1 character. If unset, the input video should be - embedded in the request as ``input_content``. If set, - ``input_content`` should be unset. - input_content: - The video data bytes. Encoding: base64. If unset, the input - video(s) should be specified via ``input_uri``. If set, - ``input_uri`` should be unset. - features: - Requested video annotation features. - video_context: - Additional video context and/or feature-specific parameters. - output_uri: - Optional location where the output (in JSON format) should be - stored. Currently, only `Google Cloud Storage - `__ URIs are supported, - which must be specified in the following format: - ``gs://bucket-id/object-id`` (other URI formats return [google - .rpc.Code.INVALID\_ARGUMENT][google.rpc.Code.INVALID\_ARGUMENT - ]). For more information, see `Request URIs - `__. - location_id: - Optional cloud region where annotation should take place. - Supported cloud regions: ``us-east1``, ``us-west1``, ``europe- - west1``, ``asia-east1``. If no region is specified, a region - will be determined based on video file location. - """, - # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1beta1.AnnotateVideoRequest) - ), -) -_sym_db.RegisterMessage(AnnotateVideoRequest) - -VideoContext = _reflection.GeneratedProtocolMessageType( - "VideoContext", - (_message.Message,), - dict( - DESCRIPTOR=_VIDEOCONTEXT, - __module__="google.cloud.videointelligence_v1beta1.proto.video_intelligence_pb2", - __doc__="""Video context and/or feature-specific parameters. - - - Attributes: - segments: - Video segments to annotate. The segments may overlap and are - not required to be contiguous or span the whole video. If - unspecified, each video is treated as a single segment. - label_detection_mode: - If label detection has been requested, what labels should be - detected in addition to video-level labels or segment-level - labels. If unspecified, defaults to ``SHOT_MODE``. 
- stationary_camera: - Whether the video has been shot from a stationary (i.e. non- - moving) camera. When set to true, might improve detection - accuracy for moving objects. - label_detection_model: - Model to use for label detection. Supported values: "latest" - and "stable" (the default). - face_detection_model: - Model to use for face detection. Supported values: "latest" - and "stable" (the default). - shot_change_detection_model: - Model to use for shot change detection. Supported values: - "latest" and "stable" (the default). - safe_search_detection_model: - Model to use for safe search detection. Supported values: - "latest" and "stable" (the default). - """, - # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1beta1.VideoContext) - ), -) -_sym_db.RegisterMessage(VideoContext) - -VideoSegment = _reflection.GeneratedProtocolMessageType( - "VideoSegment", - (_message.Message,), - dict( - DESCRIPTOR=_VIDEOSEGMENT, - __module__="google.cloud.videointelligence_v1beta1.proto.video_intelligence_pb2", - __doc__="""Video segment. - - - Attributes: - start_time_offset: - Start offset in microseconds (inclusive). Unset means 0. - end_time_offset: - End offset in microseconds (inclusive). Unset means 0. - """, - # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1beta1.VideoSegment) - ), -) -_sym_db.RegisterMessage(VideoSegment) - -LabelLocation = _reflection.GeneratedProtocolMessageType( - "LabelLocation", - (_message.Message,), - dict( - DESCRIPTOR=_LABELLOCATION, - __module__="google.cloud.videointelligence_v1beta1.proto.video_intelligence_pb2", - __doc__="""Label location. - - - Attributes: - segment: - Video segment. Set to [-1, -1] for video-level labels. Set to - [timestamp, timestamp] for frame-level labels. Otherwise, - corresponds to one of ``AnnotateSpec.segments`` (if specified) - or to shot boundaries (if requested). - confidence: - Confidence that the label is accurate. Range: [0, 1]. - level: - Label level. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1beta1.LabelLocation) - ), -) -_sym_db.RegisterMessage(LabelLocation) - -LabelAnnotation = _reflection.GeneratedProtocolMessageType( - "LabelAnnotation", - (_message.Message,), - dict( - DESCRIPTOR=_LABELANNOTATION, - __module__="google.cloud.videointelligence_v1beta1.proto.video_intelligence_pb2", - __doc__="""Label annotation. - - - Attributes: - description: - Textual description, e.g. ``Fixed-gear bicycle``. - language_code: - Language code for ``description`` in BCP-47 format. - locations: - Where the label was detected and with what confidence. - """, - # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1beta1.LabelAnnotation) - ), -) -_sym_db.RegisterMessage(LabelAnnotation) - -SafeSearchAnnotation = _reflection.GeneratedProtocolMessageType( - "SafeSearchAnnotation", - (_message.Message,), - dict( - DESCRIPTOR=_SAFESEARCHANNOTATION, - __module__="google.cloud.videointelligence_v1beta1.proto.video_intelligence_pb2", - __doc__="""Safe search annotation (based on per-frame visual signals only). If no - unsafe content has been detected in a frame, no annotations are present - for that frame. If only some types of unsafe content have been detected - in a frame, the likelihood is set to ``UNKNOWN`` for all other types of - unsafe content. - - - Attributes: - adult: - Likelihood of adult content. - spoof: - Likelihood that an obvious modification was made to the - original version to make it appear funny or offensive. - medical: - Likelihood of medical content. - violent: - Likelihood of violent content. - racy: - Likelihood of racy content. - time_offset: - Video time offset in microseconds. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1beta1.SafeSearchAnnotation) - ), -) -_sym_db.RegisterMessage(SafeSearchAnnotation) - -BoundingBox = _reflection.GeneratedProtocolMessageType( - "BoundingBox", - (_message.Message,), - dict( - DESCRIPTOR=_BOUNDINGBOX, - __module__="google.cloud.videointelligence_v1beta1.proto.video_intelligence_pb2", - __doc__="""Bounding box. - - - Attributes: - left: - Left X coordinate. - right: - Right X coordinate. - bottom: - Bottom Y coordinate. - top: - Top Y coordinate. - """, - # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1beta1.BoundingBox) - ), -) -_sym_db.RegisterMessage(BoundingBox) - -FaceLocation = _reflection.GeneratedProtocolMessageType( - "FaceLocation", - (_message.Message,), - dict( - DESCRIPTOR=_FACELOCATION, - __module__="google.cloud.videointelligence_v1beta1.proto.video_intelligence_pb2", - __doc__="""Face location. - - - Attributes: - bounding_box: - Bounding box in a frame. - time_offset: - Video time offset in microseconds. - """, - # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1beta1.FaceLocation) - ), -) -_sym_db.RegisterMessage(FaceLocation) - -FaceAnnotation = _reflection.GeneratedProtocolMessageType( - "FaceAnnotation", - (_message.Message,), - dict( - DESCRIPTOR=_FACEANNOTATION, - __module__="google.cloud.videointelligence_v1beta1.proto.video_intelligence_pb2", - __doc__="""Face annotation. - - - Attributes: - thumbnail: - Thumbnail of a representative face view (in JPEG format). - Encoding: base64. - segments: - All locations where a face was detected. Faces are detected - and tracked on a per-video basis (as opposed to across - multiple videos). - locations: - Face locations at one frame per second. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1beta1.FaceAnnotation) - ), -) -_sym_db.RegisterMessage(FaceAnnotation) - -VideoAnnotationResults = _reflection.GeneratedProtocolMessageType( - "VideoAnnotationResults", - (_message.Message,), - dict( - DESCRIPTOR=_VIDEOANNOTATIONRESULTS, - __module__="google.cloud.videointelligence_v1beta1.proto.video_intelligence_pb2", - __doc__="""Annotation results for a single video. - - - Attributes: - input_uri: - Video file location in `Google Cloud Storage - `__. - label_annotations: - Label annotations. There is exactly one element for each - unique label. - face_annotations: - Face annotations. There is exactly one element for each unique - face. - shot_annotations: - Shot annotations. Each shot is represented as a video segment. - safe_search_annotations: - Safe search annotations. - error: - If set, indicates an error. Note that for a single - ``AnnotateVideoRequest`` some videos may succeed and some may - fail. - """, - # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1beta1.VideoAnnotationResults) - ), -) -_sym_db.RegisterMessage(VideoAnnotationResults) - -AnnotateVideoResponse = _reflection.GeneratedProtocolMessageType( - "AnnotateVideoResponse", - (_message.Message,), - dict( - DESCRIPTOR=_ANNOTATEVIDEORESPONSE, - __module__="google.cloud.videointelligence_v1beta1.proto.video_intelligence_pb2", - __doc__="""Video annotation response. Included in the ``response`` field of the - ``Operation`` returned by the ``GetOperation`` call of the - ``google::longrunning::Operations`` service. - - - Attributes: - annotation_results: - Annotation results for all videos specified in - ``AnnotateVideoRequest``. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1beta1.AnnotateVideoResponse) - ), -) -_sym_db.RegisterMessage(AnnotateVideoResponse) - -VideoAnnotationProgress = _reflection.GeneratedProtocolMessageType( - "VideoAnnotationProgress", - (_message.Message,), - dict( - DESCRIPTOR=_VIDEOANNOTATIONPROGRESS, - __module__="google.cloud.videointelligence_v1beta1.proto.video_intelligence_pb2", - __doc__="""Annotation progress for a single video. - - - Attributes: - input_uri: - Video file location in `Google Cloud Storage - `__. - progress_percent: - Approximate percentage processed thus far. Guaranteed to be - 100 when fully processed. - start_time: - Time when the request was received. - update_time: - Time of the most recent update. - """, - # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1beta1.VideoAnnotationProgress) - ), -) -_sym_db.RegisterMessage(VideoAnnotationProgress) - -AnnotateVideoProgress = _reflection.GeneratedProtocolMessageType( - "AnnotateVideoProgress", - (_message.Message,), - dict( - DESCRIPTOR=_ANNOTATEVIDEOPROGRESS, - __module__="google.cloud.videointelligence_v1beta1.proto.video_intelligence_pb2", - __doc__="""Video annotation progress. Included in the ``metadata`` field of the - ``Operation`` returned by the ``GetOperation`` call of the - ``google::longrunning::Operations`` service. - - - Attributes: - annotation_progress: - Progress metadata for all videos specified in - ``AnnotateVideoRequest``. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1beta1.AnnotateVideoProgress) - ), -) -_sym_db.RegisterMessage(AnnotateVideoProgress) - - -DESCRIPTOR._options = None - -_VIDEOINTELLIGENCESERVICE = _descriptor.ServiceDescriptor( - name="VideoIntelligenceService", - full_name="google.cloud.videointelligence.v1beta1.VideoIntelligenceService", - file=DESCRIPTOR, - index=0, - serialized_options=None, - serialized_start=3257, - serialized_end=3431, - methods=[ - _descriptor.MethodDescriptor( - name="AnnotateVideo", - full_name="google.cloud.videointelligence.v1beta1.VideoIntelligenceService.AnnotateVideo", - index=0, - containing_service=None, - input_type=_ANNOTATEVIDEOREQUEST, - output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, - serialized_options=_b( - '\202\323\344\223\002\035"\030/v1beta1/videos:annotate:\001*' - ), - ) - ], -) -_sym_db.RegisterServiceDescriptor(_VIDEOINTELLIGENCESERVICE) - -DESCRIPTOR.services_by_name["VideoIntelligenceService"] = _VIDEOINTELLIGENCESERVICE - -# @@protoc_insertion_point(module_scope) diff --git a/videointelligence/google/cloud/videointelligence_v1beta1/proto/video_intelligence_pb2_grpc.py b/videointelligence/google/cloud/videointelligence_v1beta1/proto/video_intelligence_pb2_grpc.py deleted file mode 100644 index a8a8f1125e20..000000000000 --- a/videointelligence/google/cloud/videointelligence_v1beta1/proto/video_intelligence_pb2_grpc.py +++ /dev/null @@ -1,56 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc - -from google.cloud.videointelligence_v1beta1.proto import ( - video_intelligence_pb2 as google_dot_cloud_dot_videointelligence__v1beta1_dot_proto_dot_video__intelligence__pb2, -) -from google.longrunning import ( - operations_pb2 as google_dot_longrunning_dot_operations__pb2, -) - - -class VideoIntelligenceServiceStub(object): - """Service that implements Google Cloud Video Intelligence API. 
- """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. - """ - self.AnnotateVideo = channel.unary_unary( - "/google.cloud.videointelligence.v1beta1.VideoIntelligenceService/AnnotateVideo", - request_serializer=google_dot_cloud_dot_videointelligence__v1beta1_dot_proto_dot_video__intelligence__pb2.AnnotateVideoRequest.SerializeToString, - response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, - ) - - -class VideoIntelligenceServiceServicer(object): - """Service that implements Google Cloud Video Intelligence API. - """ - - def AnnotateVideo(self, request, context): - """Performs asynchronous video annotation. Progress and results can be - retrieved through the `google.longrunning.Operations` interface. - `Operation.metadata` contains `AnnotateVideoProgress` (progress). - `Operation.response` contains `AnnotateVideoResponse` (results). - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - -def add_VideoIntelligenceServiceServicer_to_server(servicer, server): - rpc_method_handlers = { - "AnnotateVideo": grpc.unary_unary_rpc_method_handler( - servicer.AnnotateVideo, - request_deserializer=google_dot_cloud_dot_videointelligence__v1beta1_dot_proto_dot_video__intelligence__pb2.AnnotateVideoRequest.FromString, - response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, - ) - } - generic_handler = grpc.method_handlers_generic_handler( - "google.cloud.videointelligence.v1beta1.VideoIntelligenceService", - rpc_method_handlers, - ) - server.add_generic_rpc_handlers((generic_handler,)) diff --git a/videointelligence/google/cloud/videointelligence_v1p3beta1/gapic/enums.py b/videointelligence/google/cloud/videointelligence_v1p3beta1/gapic/enums.py index a78df64dbbc3..4aa081ac73a0 100644 --- a/videointelligence/google/cloud/videointelligence_v1p3beta1/gapic/enums.py +++ 
b/videointelligence/google/cloud/videointelligence_v1p3beta1/gapic/enums.py @@ -32,6 +32,7 @@ class Feature(enum.IntEnum): TEXT_DETECTION (int): OCR text detection and tracking. OBJECT_TRACKING (int): Object detection and tracking. LOGO_RECOGNITION (int): Logo detection, tracking, and recognition. + CELEBRITY_RECOGNITION (int): Celebrity recognition. """ FEATURE_UNSPECIFIED = 0 @@ -42,6 +43,7 @@ class Feature(enum.IntEnum): TEXT_DETECTION = 7 OBJECT_TRACKING = 9 LOGO_RECOGNITION = 12 + CELEBRITY_RECOGNITION = 13 class LabelDetectionMode(enum.IntEnum): diff --git a/videointelligence/google/cloud/videointelligence_v1p3beta1/gapic/streaming_video_intelligence_service_client.py b/videointelligence/google/cloud/videointelligence_v1p3beta1/gapic/streaming_video_intelligence_service_client.py index 2414631f29bd..6445359b9cce 100644 --- a/videointelligence/google/cloud/videointelligence_v1p3beta1/gapic/streaming_video_intelligence_service_client.py +++ b/videointelligence/google/cloud/videointelligence_v1p3beta1/gapic/streaming_video_intelligence_service_client.py @@ -46,7 +46,7 @@ class StreamingVideoIntelligenceServiceClient(object): - """Service that implements Google Cloud Video Intelligence Streaming API.""" + """Service that implements streaming Google Cloud Video Intelligence API.""" SERVICE_ADDRESS = "videointelligence.googleapis.com:443" """The default address of the service.""" diff --git a/videointelligence/google/cloud/videointelligence_v1p3beta1/gapic/video_intelligence_service_client.py b/videointelligence/google/cloud/videointelligence_v1p3beta1/gapic/video_intelligence_service_client.py index 786984ce04da..0175c889f53d 100644 --- a/videointelligence/google/cloud/videointelligence_v1p3beta1/gapic/video_intelligence_service_client.py +++ b/videointelligence/google/cloud/videointelligence_v1p3beta1/gapic/video_intelligence_service_client.py @@ -243,19 +243,19 @@ def annotate_video( request as ``input_content``. If set, ``input_content`` should be unset. 
input_content (bytes): The video data bytes. If unset, the input video(s) should be specified via ``input_uri``. If set, ``input_uri`` should be unset. - features (list[~google.cloud.videointelligence_v1p3beta1.types.Feature]): Requested video annotation features. + features (list[~google.cloud.videointelligence_v1p3beta1.types.Feature]): Required. Requested video annotation features. video_context (Union[dict, ~google.cloud.videointelligence_v1p3beta1.types.VideoContext]): Additional video context and/or feature-specific parameters. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.videointelligence_v1p3beta1.types.VideoContext` - output_uri (str): Optional location where the output (in JSON format) should be stored. + output_uri (str): Optional. Location where the output (in JSON format) should be stored. Currently, only `Google Cloud Storage `__ URIs are supported, which must be specified in the following format: ``gs://bucket-id/object-id`` (other URI formats return ``google.rpc.Code.INVALID_ARGUMENT``). For more information, see `Request URIs `__. - location_id (str): Optional cloud region where annotation should take place. Supported + location_id (str): Optional. Cloud region where annotation should take place. Supported cloud regions: ``us-east1``, ``us-west1``, ``europe-west1``, ``asia-east1``. If no region is specified, a region will be determined based on video file location. diff --git a/videointelligence/google/cloud/videointelligence_v1p3beta1/proto/video_intelligence.proto b/videointelligence/google/cloud/videointelligence_v1p3beta1/proto/video_intelligence.proto index e37726e0b1aa..1203b3152838 100644 --- a/videointelligence/google/cloud/videointelligence_v1p3beta1/proto/video_intelligence.proto +++ b/videointelligence/google/cloud/videointelligence_v1p3beta1/proto/video_intelligence.proto @@ -1,4 +1,4 @@ -// Copyright 2018 Google LLC. +// Copyright 2019 Google LLC. 
// // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -18,6 +18,8 @@ syntax = "proto3"; package google.cloud.videointelligence.v1p3beta1; import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; import "google/longrunning/operations.proto"; import "google/protobuf/duration.proto"; import "google/protobuf/timestamp.proto"; @@ -29,10 +31,13 @@ option java_multiple_files = true; option java_outer_classname = "VideoIntelligenceServiceProto"; option java_package = "com.google.cloud.videointelligence.v1p3beta1"; option php_namespace = "Google\\Cloud\\VideoIntelligence\\V1p3beta1"; -option ruby_package = "Google::Cloud::VideoIntelligence::V1p3beta1"; // Service that implements Google Cloud Video Intelligence API. service VideoIntelligenceService { + option (google.api.default_host) = "videointelligence.googleapis.com"; + option (google.api.oauth_scopes) = + "https://www.googleapis.com/auth/cloud-platform"; + // Performs asynchronous video annotation. Progress and results can be // retrieved through the `google.longrunning.Operations` interface. // `Operation.metadata` contains `AnnotateVideoProgress` (progress). @@ -43,16 +48,25 @@ service VideoIntelligenceService { post: "/v1p3beta1/videos:annotate" body: "*" }; + option (google.api.method_signature) = "input_uri,features"; + option (google.longrunning.operation_info) = { + response_type: "AnnotateVideoResponse" + metadata_type: "AnnotateVideoProgress" + }; } } -// Service that implements Google Cloud Video Intelligence Streaming API. +// Service that implements streaming Google Cloud Video Intelligence API. 
service StreamingVideoIntelligenceService { + option (google.api.default_host) = "videointelligence.googleapis.com"; + option (google.api.oauth_scopes) = + "https://www.googleapis.com/auth/cloud-platform"; + // Performs video annotation with bidirectional streaming: emitting results // while sending video/audio bytes. // This method is only available via the gRPC API (not REST). rpc StreamingAnnotateVideo(stream StreamingAnnotateVideoRequest) - returns (stream StreamingAnnotateVideoResponse); + returns (stream StreamingAnnotateVideoResponse) {} } // Video annotation request. @@ -74,24 +88,24 @@ message AnnotateVideoRequest { // If set, `input_uri` should be unset. bytes input_content = 6; - // Requested video annotation features. - repeated Feature features = 2; + // Required. Requested video annotation features. + repeated Feature features = 2 [(google.api.field_behavior) = REQUIRED]; // Additional video context and/or feature-specific parameters. VideoContext video_context = 3; - // Optional location where the output (in JSON format) should be stored. + // Optional. Location where the output (in JSON format) should be stored. // Currently, only [Google Cloud Storage](https://cloud.google.com/storage/) // URIs are supported, which must be specified in the following format: // `gs://bucket-id/object-id` (other URI formats return // [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]). For // more information, see [Request URIs](/storage/docs/reference-uris). - string output_uri = 4; + string output_uri = 4 [(google.api.field_behavior) = OPTIONAL]; - // Optional cloud region where annotation should take place. Supported cloud + // Optional. Cloud region where annotation should take place. Supported cloud // regions: `us-east1`, `us-west1`, `europe-west1`, `asia-east1`. If no region // is specified, a region will be determined based on video file location. 
- string location_id = 5; + string location_id = 5 [(google.api.field_behavior) = OPTIONAL]; } // Video context and/or feature-specific parameters. @@ -290,8 +304,7 @@ message NormalizedBoundingBox { float bottom = 4; } -// For tracking related features, such as LOGO_RECOGNITION, FACE_DETECTION, -// CELEBRITY_RECOGNITION, PERSON_DETECTION. +// For tracking related features. // An object at time_offset with attributes, and located with // normalized_bounding_box. message TimestampedObject { @@ -303,7 +316,8 @@ message TimestampedObject { google.protobuf.Duration time_offset = 2; // Optional. The attributes of the object in the bounding box. - repeated DetectedAttribute attributes = 3; + repeated DetectedAttribute attributes = 3 + [(google.api.field_behavior) = OPTIONAL]; } // A track of an object instance. @@ -315,10 +329,11 @@ message Track { repeated TimestampedObject timestamped_objects = 2; // Optional. Attributes in the track level. - repeated DetectedAttribute attributes = 3; + repeated DetectedAttribute attributes = 3 + [(google.api.field_behavior) = OPTIONAL]; // Optional. The confidence score of the tracked object. - float confidence = 4; + float confidence = 4 [(google.api.field_behavior) = OPTIONAL]; } // A generic detected attribute represented by name in string format. @@ -335,20 +350,80 @@ message DetectedAttribute { string value = 3; } +// Celebrity definition. +message Celebrity { + // The resource name of the celebrity. Have the format + // `video-intelligence/kg-mid` indicates a celebrity from preloaded gallery. + // kg-mid is the id in Google knowledge graph, which is unique for the + // celebrity. + string name = 1; + + // The celebrity name. + string display_name = 2; + + // Textual description of additional information about the celebrity, if + // applicable. + string description = 3; +} + +// The annotation result of a celebrity face track. RecognizedCelebrity field +// could be empty if the face track does not have any matched celebrities. 
+message CelebrityTrack { + // The recognized celebrity with confidence score. + message RecognizedCelebrity { + // The recognized celebrity. + Celebrity celebrity = 1; + + // Recognition confidence. Range [0, 1]. + float confidence = 2; + } + + // Top N match of the celebrities for the face in this track. + repeated RecognizedCelebrity celebrities = 1; + + // A track of a person's face. + Track face_track = 3; +} + +// Celebrity recognition annotation per video. +message CelebrityRecognitionAnnotation { + // The tracks detected from the input video, including recognized celebrities + // and other detected faces in the video. + repeated CelebrityTrack celebrity_tracks = 1; +} + // Annotation results for a single video. message VideoAnnotationResults { // Video file location in // [Google Cloud Storage](https://cloud.google.com/storage/). string input_uri = 1; - // Label annotations on video level or user specified segment level. + // Video segment on which the annotation is run. + VideoSegment segment = 10; + + // Topical label annotations on video level or user specified segment level. // There is exactly one element for each unique label. repeated LabelAnnotation segment_label_annotations = 2; - // Label annotations on shot level. + // Presence label annotations on video level or user specified segment level. + // There is exactly one element for each unique label. Compared to the + // existing topical `segment_label_annotations`, this field presents more + // fine-grained, segment-level labels detected in video content and is made + // available only when the client sets `LabelDetectionConfig.model` to + // "builtin/latest" in the request. + repeated LabelAnnotation segment_presence_label_annotations = 23; + + // Topical label annotations on shot level. // There is exactly one element for each unique label. repeated LabelAnnotation shot_label_annotations = 3; + // Presence label annotations on shot level. There is exactly one element for + // each unique label. 
Compared to the existing topical + // `shot_label_annotations`, this field presents more fine-grained, shot-level + // labels detected in video content and is made available only when the client + // sets `LabelDetectionConfig.model` to "builtin/latest" in the request. + repeated LabelAnnotation shot_presence_label_annotations = 24; + // Label annotations on frame level. // There is exactly one element for each unique label. repeated LabelAnnotation frame_label_annotations = 4; @@ -373,6 +448,9 @@ message VideoAnnotationResults { // Annotations for list of logos detected, tracked and recognized in video. repeated LogoRecognitionAnnotation logo_recognition_annotations = 19; + // Celebrity recognition annotations. + CelebrityRecognitionAnnotation celebrity_recognition_annotations = 21; + // If set, indicates an error. Note that for a single `AnnotateVideoRequest` // some videos may succeed and some may fail. google.rpc.Status error = 9; @@ -401,6 +479,14 @@ message VideoAnnotationProgress { // Time of the most recent update. google.protobuf.Timestamp update_time = 4; + + // Specifies which feature is being tracked if the request contains more than + // one features. + Feature feature = 5; + + // Specifies which segment is being tracked if the request contains more than + // one segments. + VideoSegment segment = 6; } // Video annotation progress. Included in the `metadata` @@ -413,72 +499,73 @@ message AnnotateVideoProgress { // Config for SPEECH_TRANSCRIPTION. message SpeechTranscriptionConfig { - // *Required* The language of the supplied audio as a + // Required. *Required* The language of the supplied audio as a // [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tag. // Example: "en-US". // See [Language Support](https://cloud.google.com/speech/docs/languages) // for a list of the currently supported language codes. 
- string language_code = 1; + string language_code = 1 [(google.api.field_behavior) = REQUIRED]; - // *Optional* Maximum number of recognition hypotheses to be returned. + // Optional. Maximum number of recognition hypotheses to be returned. // Specifically, the maximum number of `SpeechRecognitionAlternative` messages // within each `SpeechTranscription`. The server may return fewer than // `max_alternatives`. Valid values are `0`-`30`. A value of `0` or `1` will // return a maximum of one. If omitted, will return a maximum of one. - int32 max_alternatives = 2; + int32 max_alternatives = 2 [(google.api.field_behavior) = OPTIONAL]; - // *Optional* If set to `true`, the server will attempt to filter out + // Optional. If set to `true`, the server will attempt to filter out // profanities, replacing all but the initial character in each filtered word // with asterisks, e.g. "f***". If set to `false` or omitted, profanities // won't be filtered out. - bool filter_profanity = 3; + bool filter_profanity = 3 [(google.api.field_behavior) = OPTIONAL]; - // *Optional* A means to provide context to assist the speech recognition. - repeated SpeechContext speech_contexts = 4; + // Optional. A means to provide context to assist the speech recognition. + repeated SpeechContext speech_contexts = 4 + [(google.api.field_behavior) = OPTIONAL]; - // *Optional* If 'true', adds punctuation to recognition result hypotheses. + // Optional. If 'true', adds punctuation to recognition result hypotheses. // This feature is only available in select languages. Setting this for // requests in other languages has no effect at all. The default 'false' value // does not add punctuation to result hypotheses. NOTE: "This is currently // offered as an experimental service, complimentary to all users. In the // future this may be exclusively available as a premium feature." 
- bool enable_automatic_punctuation = 5; + bool enable_automatic_punctuation = 5 + [(google.api.field_behavior) = OPTIONAL]; - // *Optional* For file formats, such as MXF or MKV, supporting multiple audio + // Optional. For file formats, such as MXF or MKV, supporting multiple audio // tracks, specify up to two tracks. Default: track 0. - repeated int32 audio_tracks = 6; + repeated int32 audio_tracks = 6 [(google.api.field_behavior) = OPTIONAL]; - // *Optional* If 'true', enables speaker detection for each recognized word in + // Optional. If 'true', enables speaker detection for each recognized word in // the top alternative of the recognition result using a speaker_tag provided // in the WordInfo. // Note: When this is true, we send all the words from the beginning of the // audio for the top alternative in every consecutive responses. // This is done in order to improve our speaker tags as our models learn to // identify the speakers in the conversation over time. - bool enable_speaker_diarization = 7; + bool enable_speaker_diarization = 7 [(google.api.field_behavior) = OPTIONAL]; - // *Optional* - // If set, specifies the estimated number of speakers in the conversation. - // If not set, defaults to '2'. - // Ignored unless enable_speaker_diarization is set to true. - int32 diarization_speaker_count = 8; + // Optional. If set, specifies the estimated number of speakers in the + // conversation. If not set, defaults to '2'. Ignored unless + // enable_speaker_diarization is set to true. + int32 diarization_speaker_count = 8 [(google.api.field_behavior) = OPTIONAL]; - // *Optional* If `true`, the top result includes a list of words and the + // Optional. If `true`, the top result includes a list of words and the // confidence for those words. If `false`, no word-level confidence // information is returned. The default is `false`. 
- bool enable_word_confidence = 9; + bool enable_word_confidence = 9 [(google.api.field_behavior) = OPTIONAL]; } // Provides "hints" to the speech recognizer to favor specific words and phrases // in the results. message SpeechContext { - // *Optional* A list of strings containing words and phrases "hints" so that + // Optional. A list of strings containing words and phrases "hints" so that // the speech recognition is more likely to recognize them. This can be used // to improve the accuracy for specific words and phrases, for example, if // specific commands are typically spoken by the user. This can also be used // to add additional words to the vocabulary of the recognizer. See // [usage limits](https://cloud.google.com/speech/limits#content). - repeated string phrases = 1; + repeated string phrases = 1 [(google.api.field_behavior) = OPTIONAL]; } // A speech recognition result corresponding to a portion of the audio. @@ -489,11 +576,10 @@ message SpeechTranscription { // ranked by the recognizer. repeated SpeechRecognitionAlternative alternatives = 1; - // Output only. The - // [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tag of the - // language in this result. This language code was detected to have the most - // likelihood of being spoken in the audio. - string language_code = 2; + // Output only. The [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) + // language tag of the language in this result. This language code was + // detected to have the most likelihood of being spoken in the audio. + string language_code = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; } // Alternative hypotheses (a.k.a. n-best list). @@ -501,16 +587,18 @@ message SpeechRecognitionAlternative { // Transcript text representing the words that the user spoke. string transcript = 1; - // The confidence estimate between 0.0 and 1.0. A higher number + // Output only. The confidence estimate between 0.0 and 1.0. 
A higher number // indicates an estimated greater likelihood that the recognized words are - // correct. This field is typically provided only for the top hypothesis, and - // only for `is_final=true` results. Clients should not rely on the - // `confidence` field as it is not guaranteed to be accurate or consistent. + // correct. This field is set only for the top alternative. + // This field is not guaranteed to be accurate and users should not rely on it + // to be always provided. // The default of 0.0 is a sentinel value indicating `confidence` was not set. - float confidence = 2; + float confidence = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; - // A list of word-specific information for each recognized word. - repeated WordInfo words = 3; + // Output only. A list of word-specific information for each recognized word. + // Note: When `enable_speaker_diarization` is true, you will see all the words + // from the beginning of the audio. + repeated WordInfo words = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; } // Word-specific information for recognized words. Word information is only @@ -538,13 +626,13 @@ message WordInfo { // This field is not guaranteed to be accurate and users should not rely on it // to be always provided. // The default of 0.0 is a sentinel value indicating `confidence` was not set. - float confidence = 4; + float confidence = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. A distinct integer value is assigned for every speaker within // the audio. This field specifies which one of those speakers was detected to // have spoken this word. Value ranges from 1 up to diarization_speaker_count, // and is only set if speaker diarization is enabled. - int32 speaker_tag = 5; + int32 speaker_tag = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; } // A vertex represents a 2D point in the image. @@ -645,6 +733,7 @@ message ObjectTrackingAnnotation { // Non-streaming batch mode ONLY. 
// Each object track corresponds to one video segment where it appears. VideoSegment segment = 3; + // Streaming mode ONLY. // In streaming mode, we do not know the end time of a tracked object // before it is completed. Hence, there is no VideoSegment info returned. @@ -712,26 +801,24 @@ message StreamingAnnotateVideoResponse { string annotation_results_uri = 3; } -// Config for AUTOML_CLASSIFICATION in streaming mode. +// Config for STREAMING_AUTOML_CLASSIFICATION. message StreamingAutomlClassificationConfig { // Resource name of AutoML model. // Format: `projects/{project_id}/locations/{location_id}/models/{model_id}` string model_name = 1; } -// Config for AUTOML_OBJECT_TRACKING in streaming mode. +// Config for STREAMING_AUTOML_OBJECT_TRACKING. message StreamingAutomlObjectTrackingConfig { // Resource name of AutoML model. // Format: `projects/{project_id}/locations/{location_id}/models/{model_id}` string model_name = 1; } -// Config for EXPLICIT_CONTENT_DETECTION in streaming mode. -message StreamingExplicitContentDetectionConfig { - // No customized config support. -} +// Config for STREAMING_EXPLICIT_CONTENT_DETECTION. +message StreamingExplicitContentDetectionConfig {} -// Config for LABEL_DETECTION in streaming mode. +// Config for STREAMING_LABEL_DETECTION. message StreamingLabelDetectionConfig { // Whether the video has been captured from a stationary (i.e. non-moving) // camera. When set to true, might improve detection accuracy for moving @@ -740,14 +827,10 @@ message StreamingLabelDetectionConfig { } // Config for STREAMING_OBJECT_TRACKING. -message StreamingObjectTrackingConfig { - // No customized config support. -} +message StreamingObjectTrackingConfig {} -// Config for SHOT_CHANGE_DETECTION in streaming mode. -message StreamingShotChangeDetectionConfig { - // No customized config support. -} +// Config for STREAMING_SHOT_CHANGE_DETECTION. +message StreamingShotChangeDetectionConfig {} // Config for streaming storage option. 
message StreamingStorageConfig { @@ -840,6 +923,9 @@ enum Feature { // Logo detection, tracking, and recognition. LOGO_RECOGNITION = 12; + + // Celebrity recognition. + CELEBRITY_RECOGNITION = 13; } // Label detection mode. @@ -882,16 +968,22 @@ enum Likelihood { enum StreamingFeature { // Unspecified. STREAMING_FEATURE_UNSPECIFIED = 0; + // Label detection. Detect objects, such as dog or flower. STREAMING_LABEL_DETECTION = 1; + // Shot change detection. STREAMING_SHOT_CHANGE_DETECTION = 2; + // Explicit content detection. STREAMING_EXPLICIT_CONTENT_DETECTION = 3; + // Object detection and tracking. STREAMING_OBJECT_TRACKING = 4; + // Video classification based on AutoML model. STREAMING_AUTOML_CLASSIFICATION = 21; + // Object detection and tracking based on AutoML model. STREAMING_AUTOML_OBJECT_TRACKING = 22; } diff --git a/videointelligence/google/cloud/videointelligence_v1p3beta1/proto/video_intelligence_pb2.py b/videointelligence/google/cloud/videointelligence_v1p3beta1/proto/video_intelligence_pb2.py index 517845e5a7c9..fd02c615e624 100644 --- a/videointelligence/google/cloud/videointelligence_v1p3beta1/proto/video_intelligence_pb2.py +++ b/videointelligence/google/cloud/videointelligence_v1p3beta1/proto/video_intelligence_pb2.py @@ -17,6 +17,8 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.longrunning import ( operations_pb2 as google_dot_longrunning_dot_operations__pb2, ) @@ -30,13 +32,15 @@ package="google.cloud.videointelligence.v1p3beta1", syntax="proto3", serialized_options=_b( - 
"\n,com.google.cloud.videointelligence.v1p3beta1B\035VideoIntelligenceServiceProtoP\001ZYgoogle.golang.org/genproto/googleapis/cloud/videointelligence/v1p3beta1;videointelligence\252\002(Google.Cloud.VideoIntelligence.V1P3Beta1\312\002(Google\\Cloud\\VideoIntelligence\\V1p3beta1\352\002+Google::Cloud::VideoIntelligence::V1p3beta1" + "\n,com.google.cloud.videointelligence.v1p3beta1B\035VideoIntelligenceServiceProtoP\001ZYgoogle.golang.org/genproto/googleapis/cloud/videointelligence/v1p3beta1;videointelligence\252\002(Google.Cloud.VideoIntelligence.V1P3Beta1\312\002(Google\\Cloud\\VideoIntelligence\\V1p3beta1" ), serialized_pb=_b( - '\nGgoogle/cloud/videointelligence_v1p3beta1/proto/video_intelligence.proto\x12(google.cloud.videointelligence.v1p3beta1\x1a\x1cgoogle/api/annotations.proto\x1a#google/longrunning/operations.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto"\xfd\x01\n\x14\x41nnotateVideoRequest\x12\x11\n\tinput_uri\x18\x01 \x01(\t\x12\x15\n\rinput_content\x18\x06 \x01(\x0c\x12\x43\n\x08\x66\x65\x61tures\x18\x02 \x03(\x0e\x32\x31.google.cloud.videointelligence.v1p3beta1.Feature\x12M\n\rvideo_context\x18\x03 \x01(\x0b\x32\x36.google.cloud.videointelligence.v1p3beta1.VideoContext\x12\x12\n\noutput_uri\x18\x04 \x01(\t\x12\x13\n\x0blocation_id\x18\x05 \x01(\t"\xc0\x05\n\x0cVideoContext\x12H\n\x08segments\x18\x01 \x03(\x0b\x32\x36.google.cloud.videointelligence.v1p3beta1.VideoSegment\x12^\n\x16label_detection_config\x18\x02 \x01(\x0b\x32>.google.cloud.videointelligence.v1p3beta1.LabelDetectionConfig\x12i\n\x1cshot_change_detection_config\x18\x03 \x01(\x0b\x32\x43.google.cloud.videointelligence.v1p3beta1.ShotChangeDetectionConfig\x12s\n!explicit_content_detection_config\x18\x04 \x01(\x0b\x32H.google.cloud.videointelligence.v1p3beta1.ExplicitContentDetectionConfig\x12h\n\x1bspeech_transcription_config\x18\x06 
\x01(\x0b\x32\x43.google.cloud.videointelligence.v1p3beta1.SpeechTranscriptionConfig\x12\\\n\x15text_detection_config\x18\x08 \x01(\x0b\x32=.google.cloud.videointelligence.v1p3beta1.TextDetectionConfig\x12^\n\x16object_tracking_config\x18\r \x01(\x0b\x32>.google.cloud.videointelligence.v1p3beta1.ObjectTrackingConfig"\xe4\x01\n\x14LabelDetectionConfig\x12Z\n\x14label_detection_mode\x18\x01 \x01(\x0e\x32<.google.cloud.videointelligence.v1p3beta1.LabelDetectionMode\x12\x19\n\x11stationary_camera\x18\x02 \x01(\x08\x12\r\n\x05model\x18\x03 \x01(\t\x12"\n\x1a\x66rame_confidence_threshold\x18\x04 \x01(\x02\x12"\n\x1avideo_confidence_threshold\x18\x05 \x01(\x02"*\n\x19ShotChangeDetectionConfig\x12\r\n\x05model\x18\x01 \x01(\t"%\n\x14ObjectTrackingConfig\x12\r\n\x05model\x18\x01 \x01(\t"/\n\x1e\x45xplicitContentDetectionConfig\x12\r\n\x05model\x18\x01 \x01(\t"<\n\x13TextDetectionConfig\x12\x16\n\x0elanguage_hints\x18\x01 \x03(\t\x12\r\n\x05model\x18\x02 \x01(\t"x\n\x0cVideoSegment\x12\x34\n\x11start_time_offset\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x32\n\x0f\x65nd_time_offset\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration"k\n\x0cLabelSegment\x12G\n\x07segment\x18\x01 \x01(\x0b\x32\x36.google.cloud.videointelligence.v1p3beta1.VideoSegment\x12\x12\n\nconfidence\x18\x02 \x01(\x02"P\n\nLabelFrame\x12.\n\x0btime_offset\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x12\n\nconfidence\x18\x02 \x01(\x02"G\n\x06\x45ntity\x12\x11\n\tentity_id\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x15\n\rlanguage_code\x18\x03 \x01(\t"\xb0\x02\n\x0fLabelAnnotation\x12@\n\x06\x65ntity\x18\x01 \x01(\x0b\x32\x30.google.cloud.videointelligence.v1p3beta1.Entity\x12K\n\x11\x63\x61tegory_entities\x18\x02 \x03(\x0b\x32\x30.google.cloud.videointelligence.v1p3beta1.Entity\x12H\n\x08segments\x18\x03 \x03(\x0b\x32\x36.google.cloud.videointelligence.v1p3beta1.LabelSegment\x12\x44\n\x06\x66rames\x18\x04 
\x03(\x0b\x32\x34.google.cloud.videointelligence.v1p3beta1.LabelFrame"\x9c\x01\n\x14\x45xplicitContentFrame\x12.\n\x0btime_offset\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\x12T\n\x16pornography_likelihood\x18\x02 \x01(\x0e\x32\x34.google.cloud.videointelligence.v1p3beta1.Likelihood"k\n\x19\x45xplicitContentAnnotation\x12N\n\x06\x66rames\x18\x01 \x03(\x0b\x32>.google.cloud.videointelligence.v1p3beta1.ExplicitContentFrame"Q\n\x15NormalizedBoundingBox\x12\x0c\n\x04left\x18\x01 \x01(\x02\x12\x0b\n\x03top\x18\x02 \x01(\x02\x12\r\n\x05right\x18\x03 \x01(\x02\x12\x0e\n\x06\x62ottom\x18\x04 \x01(\x02"\xf6\x01\n\x11TimestampedObject\x12`\n\x17normalized_bounding_box\x18\x01 \x01(\x0b\x32?.google.cloud.videointelligence.v1p3beta1.NormalizedBoundingBox\x12.\n\x0btime_offset\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration\x12O\n\nattributes\x18\x03 \x03(\x0b\x32;.google.cloud.videointelligence.v1p3beta1.DetectedAttribute"\x8f\x02\n\x05Track\x12G\n\x07segment\x18\x01 \x01(\x0b\x32\x36.google.cloud.videointelligence.v1p3beta1.VideoSegment\x12X\n\x13timestamped_objects\x18\x02 \x03(\x0b\x32;.google.cloud.videointelligence.v1p3beta1.TimestampedObject\x12O\n\nattributes\x18\x03 \x03(\x0b\x32;.google.cloud.videointelligence.v1p3beta1.DetectedAttribute\x12\x12\n\nconfidence\x18\x04 \x01(\x02"D\n\x11\x44\x65tectedAttribute\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\nconfidence\x18\x02 \x01(\x02\x12\r\n\x05value\x18\x03 \x01(\t"\x94\x07\n\x16VideoAnnotationResults\x12\x11\n\tinput_uri\x18\x01 \x01(\t\x12\\\n\x19segment_label_annotations\x18\x02 \x03(\x0b\x32\x39.google.cloud.videointelligence.v1p3beta1.LabelAnnotation\x12Y\n\x16shot_label_annotations\x18\x03 \x03(\x0b\x32\x39.google.cloud.videointelligence.v1p3beta1.LabelAnnotation\x12Z\n\x17\x66rame_label_annotations\x18\x04 \x03(\x0b\x32\x39.google.cloud.videointelligence.v1p3beta1.LabelAnnotation\x12P\n\x10shot_annotations\x18\x06 
\x03(\x0b\x32\x36.google.cloud.videointelligence.v1p3beta1.VideoSegment\x12`\n\x13\x65xplicit_annotation\x18\x07 \x01(\x0b\x32\x43.google.cloud.videointelligence.v1p3beta1.ExplicitContentAnnotation\x12\\\n\x15speech_transcriptions\x18\x0b \x03(\x0b\x32=.google.cloud.videointelligence.v1p3beta1.SpeechTranscription\x12R\n\x10text_annotations\x18\x0c \x03(\x0b\x32\x38.google.cloud.videointelligence.v1p3beta1.TextAnnotation\x12^\n\x12object_annotations\x18\x0e \x03(\x0b\x32\x42.google.cloud.videointelligence.v1p3beta1.ObjectTrackingAnnotation\x12i\n\x1clogo_recognition_annotations\x18\x13 \x03(\x0b\x32\x43.google.cloud.videointelligence.v1p3beta1.LogoRecognitionAnnotation\x12!\n\x05\x65rror\x18\t \x01(\x0b\x32\x12.google.rpc.Status"u\n\x15\x41nnotateVideoResponse\x12\\\n\x12\x61nnotation_results\x18\x01 \x03(\x0b\x32@.google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults"\xa7\x01\n\x17VideoAnnotationProgress\x12\x11\n\tinput_uri\x18\x01 \x01(\t\x12\x18\n\x10progress_percent\x18\x02 \x01(\x05\x12.\n\nstart_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"w\n\x15\x41nnotateVideoProgress\x12^\n\x13\x61nnotation_progress\x18\x01 \x03(\x0b\x32\x41.google.cloud.videointelligence.v1p3beta1.VideoAnnotationProgress"\xdb\x02\n\x19SpeechTranscriptionConfig\x12\x15\n\rlanguage_code\x18\x01 \x01(\t\x12\x18\n\x10max_alternatives\x18\x02 \x01(\x05\x12\x18\n\x10\x66ilter_profanity\x18\x03 \x01(\x08\x12P\n\x0fspeech_contexts\x18\x04 \x03(\x0b\x32\x37.google.cloud.videointelligence.v1p3beta1.SpeechContext\x12$\n\x1c\x65nable_automatic_punctuation\x18\x05 \x01(\x08\x12\x14\n\x0c\x61udio_tracks\x18\x06 \x03(\x05\x12"\n\x1a\x65nable_speaker_diarization\x18\x07 \x01(\x08\x12!\n\x19\x64iarization_speaker_count\x18\x08 \x01(\x05\x12\x1e\n\x16\x65nable_word_confidence\x18\t \x01(\x08" \n\rSpeechContext\x12\x0f\n\x07phrases\x18\x01 
\x03(\t"\x8a\x01\n\x13SpeechTranscription\x12\\\n\x0c\x61lternatives\x18\x01 \x03(\x0b\x32\x46.google.cloud.videointelligence.v1p3beta1.SpeechRecognitionAlternative\x12\x15\n\rlanguage_code\x18\x02 \x01(\t"\x89\x01\n\x1cSpeechRecognitionAlternative\x12\x12\n\ntranscript\x18\x01 \x01(\t\x12\x12\n\nconfidence\x18\x02 \x01(\x02\x12\x41\n\x05words\x18\x03 \x03(\x0b\x32\x32.google.cloud.videointelligence.v1p3beta1.WordInfo"\x9d\x01\n\x08WordInfo\x12-\n\nstart_time\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\x12+\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x0c\n\x04word\x18\x03 \x01(\t\x12\x12\n\nconfidence\x18\x04 \x01(\x02\x12\x13\n\x0bspeaker_tag\x18\x05 \x01(\x05"(\n\x10NormalizedVertex\x12\t\n\x01x\x18\x01 \x01(\x02\x12\t\n\x01y\x18\x02 \x01(\x02"f\n\x16NormalizedBoundingPoly\x12L\n\x08vertices\x18\x01 \x03(\x0b\x32:.google.cloud.videointelligence.v1p3beta1.NormalizedVertex"\xaf\x01\n\x0bTextSegment\x12G\n\x07segment\x18\x01 \x01(\x0b\x32\x36.google.cloud.videointelligence.v1p3beta1.VideoSegment\x12\x12\n\nconfidence\x18\x02 \x01(\x02\x12\x43\n\x06\x66rames\x18\x03 \x03(\x0b\x32\x33.google.cloud.videointelligence.v1p3beta1.TextFrame"\x9b\x01\n\tTextFrame\x12^\n\x14rotated_bounding_box\x18\x01 \x01(\x0b\x32@.google.cloud.videointelligence.v1p3beta1.NormalizedBoundingPoly\x12.\n\x0btime_offset\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration"g\n\x0eTextAnnotation\x12\x0c\n\x04text\x18\x01 \x01(\t\x12G\n\x08segments\x18\x02 \x03(\x0b\x32\x35.google.cloud.videointelligence.v1p3beta1.TextSegment"\xa7\x01\n\x13ObjectTrackingFrame\x12`\n\x17normalized_bounding_box\x18\x01 \x01(\x0b\x32?.google.cloud.videointelligence.v1p3beta1.NormalizedBoundingBox\x12.\n\x0btime_offset\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration"\xac\x02\n\x18ObjectTrackingAnnotation\x12@\n\x06\x65ntity\x18\x01 \x01(\x0b\x32\x30.google.cloud.videointelligence.v1p3beta1.Entity\x12\x12\n\nconfidence\x18\x04 \x01(\x02\x12M\n\x06\x66rames\x18\x02 
\x03(\x0b\x32=.google.cloud.videointelligence.v1p3beta1.ObjectTrackingFrame\x12I\n\x07segment\x18\x03 \x01(\x0b\x32\x36.google.cloud.videointelligence.v1p3beta1.VideoSegmentH\x00\x12\x12\n\x08track_id\x18\x05 \x01(\x03H\x00\x42\x0c\n\ntrack_info"\xe8\x01\n\x19LogoRecognitionAnnotation\x12@\n\x06\x65ntity\x18\x01 \x01(\x0b\x32\x30.google.cloud.videointelligence.v1p3beta1.Entity\x12?\n\x06tracks\x18\x02 \x03(\x0b\x32/.google.cloud.videointelligence.v1p3beta1.Track\x12H\n\x08segments\x18\x03 \x03(\x0b\x32\x36.google.cloud.videointelligence.v1p3beta1.VideoSegment"\xa5\x01\n\x1dStreamingAnnotateVideoRequest\x12V\n\x0cvideo_config\x18\x01 \x01(\x0b\x32>.google.cloud.videointelligence.v1p3beta1.StreamingVideoConfigH\x00\x12\x17\n\rinput_content\x18\x02 \x01(\x0cH\x00\x42\x13\n\x11streaming_request"\xca\x01\n\x1eStreamingAnnotateVideoResponse\x12!\n\x05\x65rror\x18\x01 \x01(\x0b\x32\x12.google.rpc.Status\x12\x65\n\x12\x61nnotation_results\x18\x02 \x01(\x0b\x32I.google.cloud.videointelligence.v1p3beta1.StreamingVideoAnnotationResults\x12\x1e\n\x16\x61nnotation_results_uri\x18\x03 \x01(\t"9\n#StreamingAutomlClassificationConfig\x12\x12\n\nmodel_name\x18\x01 \x01(\t"9\n#StreamingAutomlObjectTrackingConfig\x12\x12\n\nmodel_name\x18\x01 \x01(\t")\n\'StreamingExplicitContentDetectionConfig":\n\x1dStreamingLabelDetectionConfig\x12\x19\n\x11stationary_camera\x18\x01 \x01(\x08"\x1f\n\x1dStreamingObjectTrackingConfig"$\n"StreamingShotChangeDetectionConfig"o\n\x16StreamingStorageConfig\x12(\n enable_storage_annotation_result\x18\x01 \x01(\x08\x12+\n#annotation_result_storage_directory\x18\x03 \x01(\t"\x8b\x03\n\x1fStreamingVideoAnnotationResults\x12P\n\x10shot_annotations\x18\x01 \x03(\x0b\x32\x36.google.cloud.videointelligence.v1p3beta1.VideoSegment\x12T\n\x11label_annotations\x18\x02 \x03(\x0b\x32\x39.google.cloud.videointelligence.v1p3beta1.LabelAnnotation\x12`\n\x13\x65xplicit_annotation\x18\x03 
\x01(\x0b\x32\x43.google.cloud.videointelligence.v1p3beta1.ExplicitContentAnnotation\x12^\n\x12object_annotations\x18\x04 \x03(\x0b\x32\x42.google.cloud.videointelligence.v1p3beta1.ObjectTrackingAnnotation"\x8c\x07\n\x14StreamingVideoConfig\x12K\n\x07\x66\x65\x61ture\x18\x01 \x01(\x0e\x32:.google.cloud.videointelligence.v1p3beta1.StreamingFeature\x12t\n\x1cshot_change_detection_config\x18\x02 \x01(\x0b\x32L.google.cloud.videointelligence.v1p3beta1.StreamingShotChangeDetectionConfigH\x00\x12i\n\x16label_detection_config\x18\x03 \x01(\x0b\x32G.google.cloud.videointelligence.v1p3beta1.StreamingLabelDetectionConfigH\x00\x12~\n!explicit_content_detection_config\x18\x04 \x01(\x0b\x32Q.google.cloud.videointelligence.v1p3beta1.StreamingExplicitContentDetectionConfigH\x00\x12i\n\x16object_tracking_config\x18\x05 \x01(\x0b\x32G.google.cloud.videointelligence.v1p3beta1.StreamingObjectTrackingConfigH\x00\x12u\n\x1c\x61utoml_classification_config\x18\x15 \x01(\x0b\x32M.google.cloud.videointelligence.v1p3beta1.StreamingAutomlClassificationConfigH\x00\x12v\n\x1d\x61utoml_object_tracking_config\x18\x16 \x01(\x0b\x32M.google.cloud.videointelligence.v1p3beta1.StreamingAutomlObjectTrackingConfigH\x00\x12X\n\x0estorage_config\x18\x1e \x01(\x0b\x32@.google.cloud.videointelligence.v1p3beta1.StreamingStorageConfigB\x12\n\x10streaming_config*\xcb\x01\n\x07\x46\x65\x61ture\x12\x17\n\x13\x46\x45\x41TURE_UNSPECIFIED\x10\x00\x12\x13\n\x0fLABEL_DETECTION\x10\x01\x12\x19\n\x15SHOT_CHANGE_DETECTION\x10\x02\x12\x1e\n\x1a\x45XPLICIT_CONTENT_DETECTION\x10\x03\x12\x18\n\x14SPEECH_TRANSCRIPTION\x10\x06\x12\x12\n\x0eTEXT_DETECTION\x10\x07\x12\x13\n\x0fOBJECT_TRACKING\x10\t\x12\x14\n\x10LOGO_RECOGNITION\x10\x0c*r\n\x12LabelDetectionMode\x12$\n 
LABEL_DETECTION_MODE_UNSPECIFIED\x10\x00\x12\r\n\tSHOT_MODE\x10\x01\x12\x0e\n\nFRAME_MODE\x10\x02\x12\x17\n\x13SHOT_AND_FRAME_MODE\x10\x03*t\n\nLikelihood\x12\x1a\n\x16LIKELIHOOD_UNSPECIFIED\x10\x00\x12\x11\n\rVERY_UNLIKELY\x10\x01\x12\x0c\n\x08UNLIKELY\x10\x02\x12\x0c\n\x08POSSIBLE\x10\x03\x12\n\n\x06LIKELY\x10\x04\x12\x0f\n\x0bVERY_LIKELY\x10\x05*\x8d\x02\n\x10StreamingFeature\x12!\n\x1dSTREAMING_FEATURE_UNSPECIFIED\x10\x00\x12\x1d\n\x19STREAMING_LABEL_DETECTION\x10\x01\x12#\n\x1fSTREAMING_SHOT_CHANGE_DETECTION\x10\x02\x12(\n$STREAMING_EXPLICIT_CONTENT_DETECTION\x10\x03\x12\x1d\n\x19STREAMING_OBJECT_TRACKING\x10\x04\x12#\n\x1fSTREAMING_AUTOML_CLASSIFICATION\x10\x15\x12$\n STREAMING_AUTOML_OBJECT_TRACKING\x10\x16\x32\xb2\x01\n\x18VideoIntelligenceService\x12\x95\x01\n\rAnnotateVideo\x12>.google.cloud.videointelligence.v1p3beta1.AnnotateVideoRequest\x1a\x1d.google.longrunning.Operation"%\x82\xd3\xe4\x93\x02\x1f"\x1a/v1p3beta1/videos:annotate:\x01*2\xd5\x01\n!StreamingVideoIntelligenceService\x12\xaf\x01\n\x16StreamingAnnotateVideo\x12G.google.cloud.videointelligence.v1p3beta1.StreamingAnnotateVideoRequest\x1aH.google.cloud.videointelligence.v1p3beta1.StreamingAnnotateVideoResponse(\x01\x30\x01\x42\xae\x02\n,com.google.cloud.videointelligence.v1p3beta1B\x1dVideoIntelligenceServiceProtoP\x01ZYgoogle.golang.org/genproto/googleapis/cloud/videointelligence/v1p3beta1;videointelligence\xaa\x02(Google.Cloud.VideoIntelligence.V1P3Beta1\xca\x02(Google\\Cloud\\VideoIntelligence\\V1p3beta1\xea\x02+Google::Cloud::VideoIntelligence::V1p3beta1b\x06proto3' + 
'\nGgoogle/cloud/videointelligence_v1p3beta1/proto/video_intelligence.proto\x12(google.cloud.videointelligence.v1p3beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a#google/longrunning/operations.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto"\x8c\x02\n\x14\x41nnotateVideoRequest\x12\x11\n\tinput_uri\x18\x01 \x01(\t\x12\x15\n\rinput_content\x18\x06 \x01(\x0c\x12H\n\x08\x66\x65\x61tures\x18\x02 \x03(\x0e\x32\x31.google.cloud.videointelligence.v1p3beta1.FeatureB\x03\xe0\x41\x02\x12M\n\rvideo_context\x18\x03 \x01(\x0b\x32\x36.google.cloud.videointelligence.v1p3beta1.VideoContext\x12\x17\n\noutput_uri\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12\x18\n\x0blocation_id\x18\x05 \x01(\tB\x03\xe0\x41\x01"\xc0\x05\n\x0cVideoContext\x12H\n\x08segments\x18\x01 \x03(\x0b\x32\x36.google.cloud.videointelligence.v1p3beta1.VideoSegment\x12^\n\x16label_detection_config\x18\x02 \x01(\x0b\x32>.google.cloud.videointelligence.v1p3beta1.LabelDetectionConfig\x12i\n\x1cshot_change_detection_config\x18\x03 \x01(\x0b\x32\x43.google.cloud.videointelligence.v1p3beta1.ShotChangeDetectionConfig\x12s\n!explicit_content_detection_config\x18\x04 \x01(\x0b\x32H.google.cloud.videointelligence.v1p3beta1.ExplicitContentDetectionConfig\x12h\n\x1bspeech_transcription_config\x18\x06 \x01(\x0b\x32\x43.google.cloud.videointelligence.v1p3beta1.SpeechTranscriptionConfig\x12\\\n\x15text_detection_config\x18\x08 \x01(\x0b\x32=.google.cloud.videointelligence.v1p3beta1.TextDetectionConfig\x12^\n\x16object_tracking_config\x18\r \x01(\x0b\x32>.google.cloud.videointelligence.v1p3beta1.ObjectTrackingConfig"\xe4\x01\n\x14LabelDetectionConfig\x12Z\n\x14label_detection_mode\x18\x01 \x01(\x0e\x32<.google.cloud.videointelligence.v1p3beta1.LabelDetectionMode\x12\x19\n\x11stationary_camera\x18\x02 \x01(\x08\x12\r\n\x05model\x18\x03 \x01(\t\x12"\n\x1a\x66rame_confidence_threshold\x18\x04 
\x01(\x02\x12"\n\x1avideo_confidence_threshold\x18\x05 \x01(\x02"*\n\x19ShotChangeDetectionConfig\x12\r\n\x05model\x18\x01 \x01(\t"%\n\x14ObjectTrackingConfig\x12\r\n\x05model\x18\x01 \x01(\t"/\n\x1e\x45xplicitContentDetectionConfig\x12\r\n\x05model\x18\x01 \x01(\t"<\n\x13TextDetectionConfig\x12\x16\n\x0elanguage_hints\x18\x01 \x03(\t\x12\r\n\x05model\x18\x02 \x01(\t"x\n\x0cVideoSegment\x12\x34\n\x11start_time_offset\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x32\n\x0f\x65nd_time_offset\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration"k\n\x0cLabelSegment\x12G\n\x07segment\x18\x01 \x01(\x0b\x32\x36.google.cloud.videointelligence.v1p3beta1.VideoSegment\x12\x12\n\nconfidence\x18\x02 \x01(\x02"P\n\nLabelFrame\x12.\n\x0btime_offset\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x12\n\nconfidence\x18\x02 \x01(\x02"G\n\x06\x45ntity\x12\x11\n\tentity_id\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x15\n\rlanguage_code\x18\x03 \x01(\t"\xb0\x02\n\x0fLabelAnnotation\x12@\n\x06\x65ntity\x18\x01 \x01(\x0b\x32\x30.google.cloud.videointelligence.v1p3beta1.Entity\x12K\n\x11\x63\x61tegory_entities\x18\x02 \x03(\x0b\x32\x30.google.cloud.videointelligence.v1p3beta1.Entity\x12H\n\x08segments\x18\x03 \x03(\x0b\x32\x36.google.cloud.videointelligence.v1p3beta1.LabelSegment\x12\x44\n\x06\x66rames\x18\x04 \x03(\x0b\x32\x34.google.cloud.videointelligence.v1p3beta1.LabelFrame"\x9c\x01\n\x14\x45xplicitContentFrame\x12.\n\x0btime_offset\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\x12T\n\x16pornography_likelihood\x18\x02 \x01(\x0e\x32\x34.google.cloud.videointelligence.v1p3beta1.Likelihood"k\n\x19\x45xplicitContentAnnotation\x12N\n\x06\x66rames\x18\x01 \x03(\x0b\x32>.google.cloud.videointelligence.v1p3beta1.ExplicitContentFrame"Q\n\x15NormalizedBoundingBox\x12\x0c\n\x04left\x18\x01 \x01(\x02\x12\x0b\n\x03top\x18\x02 \x01(\x02\x12\r\n\x05right\x18\x03 \x01(\x02\x12\x0e\n\x06\x62ottom\x18\x04 
\x01(\x02"\xfb\x01\n\x11TimestampedObject\x12`\n\x17normalized_bounding_box\x18\x01 \x01(\x0b\x32?.google.cloud.videointelligence.v1p3beta1.NormalizedBoundingBox\x12.\n\x0btime_offset\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration\x12T\n\nattributes\x18\x03 \x03(\x0b\x32;.google.cloud.videointelligence.v1p3beta1.DetectedAttributeB\x03\xe0\x41\x01"\x99\x02\n\x05Track\x12G\n\x07segment\x18\x01 \x01(\x0b\x32\x36.google.cloud.videointelligence.v1p3beta1.VideoSegment\x12X\n\x13timestamped_objects\x18\x02 \x03(\x0b\x32;.google.cloud.videointelligence.v1p3beta1.TimestampedObject\x12T\n\nattributes\x18\x03 \x03(\x0b\x32;.google.cloud.videointelligence.v1p3beta1.DetectedAttributeB\x03\xe0\x41\x01\x12\x17\n\nconfidence\x18\x04 \x01(\x02\x42\x03\xe0\x41\x01"D\n\x11\x44\x65tectedAttribute\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\nconfidence\x18\x02 \x01(\x02\x12\r\n\x05value\x18\x03 \x01(\t"D\n\tCelebrity\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t"\xab\x02\n\x0e\x43\x65lebrityTrack\x12\x61\n\x0b\x63\x65lebrities\x18\x01 \x03(\x0b\x32L.google.cloud.videointelligence.v1p3beta1.CelebrityTrack.RecognizedCelebrity\x12\x43\n\nface_track\x18\x03 \x01(\x0b\x32/.google.cloud.videointelligence.v1p3beta1.Track\x1aq\n\x13RecognizedCelebrity\x12\x46\n\tcelebrity\x18\x01 \x01(\x0b\x32\x33.google.cloud.videointelligence.v1p3beta1.Celebrity\x12\x12\n\nconfidence\x18\x02 \x01(\x02"t\n\x1e\x43\x65lebrityRecognitionAnnotation\x12R\n\x10\x63\x65lebrity_tracks\x18\x01 \x03(\x0b\x32\x38.google.cloud.videointelligence.v1p3beta1.CelebrityTrack"\x9d\n\n\x16VideoAnnotationResults\x12\x11\n\tinput_uri\x18\x01 \x01(\t\x12G\n\x07segment\x18\n \x01(\x0b\x32\x36.google.cloud.videointelligence.v1p3beta1.VideoSegment\x12\\\n\x19segment_label_annotations\x18\x02 \x03(\x0b\x32\x39.google.cloud.videointelligence.v1p3beta1.LabelAnnotation\x12\x65\n"segment_presence_label_annotations\x18\x17 
\x03(\x0b\x32\x39.google.cloud.videointelligence.v1p3beta1.LabelAnnotation\x12Y\n\x16shot_label_annotations\x18\x03 \x03(\x0b\x32\x39.google.cloud.videointelligence.v1p3beta1.LabelAnnotation\x12\x62\n\x1fshot_presence_label_annotations\x18\x18 \x03(\x0b\x32\x39.google.cloud.videointelligence.v1p3beta1.LabelAnnotation\x12Z\n\x17\x66rame_label_annotations\x18\x04 \x03(\x0b\x32\x39.google.cloud.videointelligence.v1p3beta1.LabelAnnotation\x12P\n\x10shot_annotations\x18\x06 \x03(\x0b\x32\x36.google.cloud.videointelligence.v1p3beta1.VideoSegment\x12`\n\x13\x65xplicit_annotation\x18\x07 \x01(\x0b\x32\x43.google.cloud.videointelligence.v1p3beta1.ExplicitContentAnnotation\x12\\\n\x15speech_transcriptions\x18\x0b \x03(\x0b\x32=.google.cloud.videointelligence.v1p3beta1.SpeechTranscription\x12R\n\x10text_annotations\x18\x0c \x03(\x0b\x32\x38.google.cloud.videointelligence.v1p3beta1.TextAnnotation\x12^\n\x12object_annotations\x18\x0e \x03(\x0b\x32\x42.google.cloud.videointelligence.v1p3beta1.ObjectTrackingAnnotation\x12i\n\x1clogo_recognition_annotations\x18\x13 \x03(\x0b\x32\x43.google.cloud.videointelligence.v1p3beta1.LogoRecognitionAnnotation\x12s\n!celebrity_recognition_annotations\x18\x15 \x01(\x0b\x32H.google.cloud.videointelligence.v1p3beta1.CelebrityRecognitionAnnotation\x12!\n\x05\x65rror\x18\t \x01(\x0b\x32\x12.google.rpc.Status"u\n\x15\x41nnotateVideoResponse\x12\\\n\x12\x61nnotation_results\x18\x01 \x03(\x0b\x32@.google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults"\xb4\x02\n\x17VideoAnnotationProgress\x12\x11\n\tinput_uri\x18\x01 \x01(\t\x12\x18\n\x10progress_percent\x18\x02 \x01(\x05\x12.\n\nstart_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x42\n\x07\x66\x65\x61ture\x18\x05 \x01(\x0e\x32\x31.google.cloud.videointelligence.v1p3beta1.Feature\x12G\n\x07segment\x18\x06 
\x01(\x0b\x32\x36.google.cloud.videointelligence.v1p3beta1.VideoSegment"w\n\x15\x41nnotateVideoProgress\x12^\n\x13\x61nnotation_progress\x18\x01 \x03(\x0b\x32\x41.google.cloud.videointelligence.v1p3beta1.VideoAnnotationProgress"\x88\x03\n\x19SpeechTranscriptionConfig\x12\x1a\n\rlanguage_code\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x1d\n\x10max_alternatives\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01\x12\x1d\n\x10\x66ilter_profanity\x18\x03 \x01(\x08\x42\x03\xe0\x41\x01\x12U\n\x0fspeech_contexts\x18\x04 \x03(\x0b\x32\x37.google.cloud.videointelligence.v1p3beta1.SpeechContextB\x03\xe0\x41\x01\x12)\n\x1c\x65nable_automatic_punctuation\x18\x05 \x01(\x08\x42\x03\xe0\x41\x01\x12\x19\n\x0c\x61udio_tracks\x18\x06 \x03(\x05\x42\x03\xe0\x41\x01\x12\'\n\x1a\x65nable_speaker_diarization\x18\x07 \x01(\x08\x42\x03\xe0\x41\x01\x12&\n\x19\x64iarization_speaker_count\x18\x08 \x01(\x05\x42\x03\xe0\x41\x01\x12#\n\x16\x65nable_word_confidence\x18\t \x01(\x08\x42\x03\xe0\x41\x01"%\n\rSpeechContext\x12\x14\n\x07phrases\x18\x01 \x03(\tB\x03\xe0\x41\x01"\x8f\x01\n\x13SpeechTranscription\x12\\\n\x0c\x61lternatives\x18\x01 \x03(\x0b\x32\x46.google.cloud.videointelligence.v1p3beta1.SpeechRecognitionAlternative\x12\x1a\n\rlanguage_code\x18\x02 \x01(\tB\x03\xe0\x41\x03"\x93\x01\n\x1cSpeechRecognitionAlternative\x12\x12\n\ntranscript\x18\x01 \x01(\t\x12\x17\n\nconfidence\x18\x02 \x01(\x02\x42\x03\xe0\x41\x03\x12\x46\n\x05words\x18\x03 \x03(\x0b\x32\x32.google.cloud.videointelligence.v1p3beta1.WordInfoB\x03\xe0\x41\x03"\xa7\x01\n\x08WordInfo\x12-\n\nstart_time\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\x12+\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x0c\n\x04word\x18\x03 \x01(\t\x12\x17\n\nconfidence\x18\x04 \x01(\x02\x42\x03\xe0\x41\x03\x12\x18\n\x0bspeaker_tag\x18\x05 \x01(\x05\x42\x03\xe0\x41\x03"(\n\x10NormalizedVertex\x12\t\n\x01x\x18\x01 \x01(\x02\x12\t\n\x01y\x18\x02 \x01(\x02"f\n\x16NormalizedBoundingPoly\x12L\n\x08vertices\x18\x01 
\x03(\x0b\x32:.google.cloud.videointelligence.v1p3beta1.NormalizedVertex"\xaf\x01\n\x0bTextSegment\x12G\n\x07segment\x18\x01 \x01(\x0b\x32\x36.google.cloud.videointelligence.v1p3beta1.VideoSegment\x12\x12\n\nconfidence\x18\x02 \x01(\x02\x12\x43\n\x06\x66rames\x18\x03 \x03(\x0b\x32\x33.google.cloud.videointelligence.v1p3beta1.TextFrame"\x9b\x01\n\tTextFrame\x12^\n\x14rotated_bounding_box\x18\x01 \x01(\x0b\x32@.google.cloud.videointelligence.v1p3beta1.NormalizedBoundingPoly\x12.\n\x0btime_offset\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration"g\n\x0eTextAnnotation\x12\x0c\n\x04text\x18\x01 \x01(\t\x12G\n\x08segments\x18\x02 \x03(\x0b\x32\x35.google.cloud.videointelligence.v1p3beta1.TextSegment"\xa7\x01\n\x13ObjectTrackingFrame\x12`\n\x17normalized_bounding_box\x18\x01 \x01(\x0b\x32?.google.cloud.videointelligence.v1p3beta1.NormalizedBoundingBox\x12.\n\x0btime_offset\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration"\xac\x02\n\x18ObjectTrackingAnnotation\x12@\n\x06\x65ntity\x18\x01 \x01(\x0b\x32\x30.google.cloud.videointelligence.v1p3beta1.Entity\x12\x12\n\nconfidence\x18\x04 \x01(\x02\x12M\n\x06\x66rames\x18\x02 \x03(\x0b\x32=.google.cloud.videointelligence.v1p3beta1.ObjectTrackingFrame\x12I\n\x07segment\x18\x03 \x01(\x0b\x32\x36.google.cloud.videointelligence.v1p3beta1.VideoSegmentH\x00\x12\x12\n\x08track_id\x18\x05 \x01(\x03H\x00\x42\x0c\n\ntrack_info"\xe8\x01\n\x19LogoRecognitionAnnotation\x12@\n\x06\x65ntity\x18\x01 \x01(\x0b\x32\x30.google.cloud.videointelligence.v1p3beta1.Entity\x12?\n\x06tracks\x18\x02 \x03(\x0b\x32/.google.cloud.videointelligence.v1p3beta1.Track\x12H\n\x08segments\x18\x03 \x03(\x0b\x32\x36.google.cloud.videointelligence.v1p3beta1.VideoSegment"\xa5\x01\n\x1dStreamingAnnotateVideoRequest\x12V\n\x0cvideo_config\x18\x01 \x01(\x0b\x32>.google.cloud.videointelligence.v1p3beta1.StreamingVideoConfigH\x00\x12\x17\n\rinput_content\x18\x02 
\x01(\x0cH\x00\x42\x13\n\x11streaming_request"\xca\x01\n\x1eStreamingAnnotateVideoResponse\x12!\n\x05\x65rror\x18\x01 \x01(\x0b\x32\x12.google.rpc.Status\x12\x65\n\x12\x61nnotation_results\x18\x02 \x01(\x0b\x32I.google.cloud.videointelligence.v1p3beta1.StreamingVideoAnnotationResults\x12\x1e\n\x16\x61nnotation_results_uri\x18\x03 \x01(\t"9\n#StreamingAutomlClassificationConfig\x12\x12\n\nmodel_name\x18\x01 \x01(\t"9\n#StreamingAutomlObjectTrackingConfig\x12\x12\n\nmodel_name\x18\x01 \x01(\t")\n\'StreamingExplicitContentDetectionConfig":\n\x1dStreamingLabelDetectionConfig\x12\x19\n\x11stationary_camera\x18\x01 \x01(\x08"\x1f\n\x1dStreamingObjectTrackingConfig"$\n"StreamingShotChangeDetectionConfig"o\n\x16StreamingStorageConfig\x12(\n enable_storage_annotation_result\x18\x01 \x01(\x08\x12+\n#annotation_result_storage_directory\x18\x03 \x01(\t"\x8b\x03\n\x1fStreamingVideoAnnotationResults\x12P\n\x10shot_annotations\x18\x01 \x03(\x0b\x32\x36.google.cloud.videointelligence.v1p3beta1.VideoSegment\x12T\n\x11label_annotations\x18\x02 \x03(\x0b\x32\x39.google.cloud.videointelligence.v1p3beta1.LabelAnnotation\x12`\n\x13\x65xplicit_annotation\x18\x03 \x01(\x0b\x32\x43.google.cloud.videointelligence.v1p3beta1.ExplicitContentAnnotation\x12^\n\x12object_annotations\x18\x04 \x03(\x0b\x32\x42.google.cloud.videointelligence.v1p3beta1.ObjectTrackingAnnotation"\x8c\x07\n\x14StreamingVideoConfig\x12K\n\x07\x66\x65\x61ture\x18\x01 \x01(\x0e\x32:.google.cloud.videointelligence.v1p3beta1.StreamingFeature\x12t\n\x1cshot_change_detection_config\x18\x02 \x01(\x0b\x32L.google.cloud.videointelligence.v1p3beta1.StreamingShotChangeDetectionConfigH\x00\x12i\n\x16label_detection_config\x18\x03 \x01(\x0b\x32G.google.cloud.videointelligence.v1p3beta1.StreamingLabelDetectionConfigH\x00\x12~\n!explicit_content_detection_config\x18\x04 \x01(\x0b\x32Q.google.cloud.videointelligence.v1p3beta1.StreamingExplicitContentDetectionConfigH\x00\x12i\n\x16object_tracking_config\x18\x05 
\x01(\x0b\x32G.google.cloud.videointelligence.v1p3beta1.StreamingObjectTrackingConfigH\x00\x12u\n\x1c\x61utoml_classification_config\x18\x15 \x01(\x0b\x32M.google.cloud.videointelligence.v1p3beta1.StreamingAutomlClassificationConfigH\x00\x12v\n\x1d\x61utoml_object_tracking_config\x18\x16 \x01(\x0b\x32M.google.cloud.videointelligence.v1p3beta1.StreamingAutomlObjectTrackingConfigH\x00\x12X\n\x0estorage_config\x18\x1e \x01(\x0b\x32@.google.cloud.videointelligence.v1p3beta1.StreamingStorageConfigB\x12\n\x10streaming_config*\xe6\x01\n\x07\x46\x65\x61ture\x12\x17\n\x13\x46\x45\x41TURE_UNSPECIFIED\x10\x00\x12\x13\n\x0fLABEL_DETECTION\x10\x01\x12\x19\n\x15SHOT_CHANGE_DETECTION\x10\x02\x12\x1e\n\x1a\x45XPLICIT_CONTENT_DETECTION\x10\x03\x12\x18\n\x14SPEECH_TRANSCRIPTION\x10\x06\x12\x12\n\x0eTEXT_DETECTION\x10\x07\x12\x13\n\x0fOBJECT_TRACKING\x10\t\x12\x14\n\x10LOGO_RECOGNITION\x10\x0c\x12\x19\n\x15\x43\x45LEBRITY_RECOGNITION\x10\r*r\n\x12LabelDetectionMode\x12$\n LABEL_DETECTION_MODE_UNSPECIFIED\x10\x00\x12\r\n\tSHOT_MODE\x10\x01\x12\x0e\n\nFRAME_MODE\x10\x02\x12\x17\n\x13SHOT_AND_FRAME_MODE\x10\x03*t\n\nLikelihood\x12\x1a\n\x16LIKELIHOOD_UNSPECIFIED\x10\x00\x12\x11\n\rVERY_UNLIKELY\x10\x01\x12\x0c\n\x08UNLIKELY\x10\x02\x12\x0c\n\x08POSSIBLE\x10\x03\x12\n\n\x06LIKELY\x10\x04\x12\x0f\n\x0bVERY_LIKELY\x10\x05*\x8d\x02\n\x10StreamingFeature\x12!\n\x1dSTREAMING_FEATURE_UNSPECIFIED\x10\x00\x12\x1d\n\x19STREAMING_LABEL_DETECTION\x10\x01\x12#\n\x1fSTREAMING_SHOT_CHANGE_DETECTION\x10\x02\x12(\n$STREAMING_EXPLICIT_CONTENT_DETECTION\x10\x03\x12\x1d\n\x19STREAMING_OBJECT_TRACKING\x10\x04\x12#\n\x1fSTREAMING_AUTOML_CLASSIFICATION\x10\x15\x12$\n 
STREAMING_AUTOML_OBJECT_TRACKING\x10\x16\x32\xce\x02\n\x18VideoIntelligenceService\x12\xdb\x01\n\rAnnotateVideo\x12>.google.cloud.videointelligence.v1p3beta1.AnnotateVideoRequest\x1a\x1d.google.longrunning.Operation"k\x82\xd3\xe4\x93\x02\x1f"\x1a/v1p3beta1/videos:annotate:\x01*\xda\x41\x12input_uri,features\xca\x41.\n\x15\x41nnotateVideoResponse\x12\x15\x41nnotateVideoProgress\x1aT\xca\x41 videointelligence.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platform2\xad\x02\n!StreamingVideoIntelligenceService\x12\xb1\x01\n\x16StreamingAnnotateVideo\x12G.google.cloud.videointelligence.v1p3beta1.StreamingAnnotateVideoRequest\x1aH.google.cloud.videointelligence.v1p3beta1.StreamingAnnotateVideoResponse"\x00(\x01\x30\x01\x1aT\xca\x41 videointelligence.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB\x80\x02\n,com.google.cloud.videointelligence.v1p3beta1B\x1dVideoIntelligenceServiceProtoP\x01ZYgoogle.golang.org/genproto/googleapis/cloud/videointelligence/v1p3beta1;videointelligence\xaa\x02(Google.Cloud.VideoIntelligence.V1P3Beta1\xca\x02(Google\\Cloud\\VideoIntelligence\\V1p3beta1b\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, google_dot_longrunning_dot_operations__pb2.DESCRIPTOR, google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, @@ -102,11 +106,18 @@ serialized_options=None, type=None, ), + _descriptor.EnumValueDescriptor( + name="CELEBRITY_RECOGNITION", + index=8, + number=13, + serialized_options=None, + type=None, + ), ], containing_type=None, serialized_options=None, - serialized_start=8836, - serialized_end=9039, + serialized_start=10023, + serialized_end=10253, ) _sym_db.RegisterEnumDescriptor(_FEATURE) @@ -140,8 +151,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=9041, - serialized_end=9155, + serialized_start=10255, + 
serialized_end=10369, ) _sym_db.RegisterEnumDescriptor(_LABELDETECTIONMODE) @@ -177,8 +188,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=9157, - serialized_end=9273, + serialized_start=10371, + serialized_end=10487, ) _sym_db.RegisterEnumDescriptor(_LIKELIHOOD) @@ -241,8 +252,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=9276, - serialized_end=9545, + serialized_start=10490, + serialized_end=10759, ) _sym_db.RegisterEnumDescriptor(_STREAMINGFEATURE) @@ -255,6 +266,7 @@ TEXT_DETECTION = 7 OBJECT_TRACKING = 9 LOGO_RECOGNITION = 12 +CELEBRITY_RECOGNITION = 13 LABEL_DETECTION_MODE_UNSPECIFIED = 0 SHOT_MODE = 1 FRAME_MODE = 2 @@ -332,7 +344,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -368,7 +380,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -386,7 +398,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -398,8 +410,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=275, - serialized_end=528, + serialized_start=333, + serialized_end=601, ) @@ -545,8 +557,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=531, - serialized_end=1235, + serialized_start=604, + serialized_end=1308, ) @@ -656,8 +668,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1238, - serialized_end=1466, + serialized_start=1311, + serialized_end=1539, ) @@ -695,8 +707,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1468, - serialized_end=1510, + serialized_start=1541, + serialized_end=1583, ) @@ -734,8 +746,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - 
serialized_start=1512, - serialized_end=1549, + serialized_start=1585, + serialized_end=1622, ) @@ -773,8 +785,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1551, - serialized_end=1598, + serialized_start=1624, + serialized_end=1671, ) @@ -830,8 +842,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1600, - serialized_end=1660, + serialized_start=1673, + serialized_end=1733, ) @@ -887,8 +899,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1662, - serialized_end=1782, + serialized_start=1735, + serialized_end=1855, ) @@ -944,8 +956,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1784, - serialized_end=1891, + serialized_start=1857, + serialized_end=1964, ) @@ -1001,8 +1013,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1893, - serialized_end=1973, + serialized_start=1966, + serialized_end=2046, ) @@ -1076,8 +1088,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1975, - serialized_end=2046, + serialized_start=2048, + serialized_end=2119, ) @@ -1169,8 +1181,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2049, - serialized_end=2353, + serialized_start=2122, + serialized_end=2426, ) @@ -1226,8 +1238,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2356, - serialized_end=2512, + serialized_start=2429, + serialized_end=2585, ) @@ -1265,8 +1277,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2514, - serialized_end=2621, + serialized_start=2587, + serialized_end=2694, ) @@ -1358,8 +1370,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2623, - serialized_end=2704, + serialized_start=2696, + serialized_end=2777, ) @@ -1421,7 +1433,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -1433,8 +1445,8 @@ syntax="proto3", 
extension_ranges=[], oneofs=[], - serialized_start=2707, - serialized_end=2953, + serialized_start=2780, + serialized_end=3031, ) @@ -1496,7 +1508,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1514,7 +1526,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -1526,8 +1538,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2956, - serialized_end=3227, + serialized_start=3034, + serialized_end=3315, ) @@ -1601,8 +1613,235 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3229, - serialized_end=3297, + serialized_start=3317, + serialized_end=3385, +) + + +_CELEBRITY = _descriptor.Descriptor( + name="Celebrity", + full_name="google.cloud.videointelligence.v1p3beta1.Celebrity", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.cloud.videointelligence.v1p3beta1.Celebrity.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="display_name", + full_name="google.cloud.videointelligence.v1p3beta1.Celebrity.display_name", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="description", + full_name="google.cloud.videointelligence.v1p3beta1.Celebrity.description", + 
index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=3387, + serialized_end=3455, +) + + +_CELEBRITYTRACK_RECOGNIZEDCELEBRITY = _descriptor.Descriptor( + name="RecognizedCelebrity", + full_name="google.cloud.videointelligence.v1p3beta1.CelebrityTrack.RecognizedCelebrity", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="celebrity", + full_name="google.cloud.videointelligence.v1p3beta1.CelebrityTrack.RecognizedCelebrity.celebrity", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="confidence", + full_name="google.cloud.videointelligence.v1p3beta1.CelebrityTrack.RecognizedCelebrity.confidence", + index=1, + number=2, + type=2, + cpp_type=6, + label=1, + has_default_value=False, + default_value=float(0), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=3644, + serialized_end=3757, +) + +_CELEBRITYTRACK = _descriptor.Descriptor( + name="CelebrityTrack", + full_name="google.cloud.videointelligence.v1p3beta1.CelebrityTrack", + filename=None, + file=DESCRIPTOR, + 
containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="celebrities", + full_name="google.cloud.videointelligence.v1p3beta1.CelebrityTrack.celebrities", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="face_track", + full_name="google.cloud.videointelligence.v1p3beta1.CelebrityTrack.face_track", + index=1, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[_CELEBRITYTRACK_RECOGNIZEDCELEBRITY], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=3458, + serialized_end=3757, +) + + +_CELEBRITYRECOGNITIONANNOTATION = _descriptor.Descriptor( + name="CelebrityRecognitionAnnotation", + full_name="google.cloud.videointelligence.v1p3beta1.CelebrityRecognitionAnnotation", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="celebrity_tracks", + full_name="google.cloud.videointelligence.v1p3beta1.CelebrityRecognitionAnnotation.celebrity_tracks", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=3759, + serialized_end=3875, ) @@ 
-1631,10 +1870,28 @@ serialized_options=None, file=DESCRIPTOR, ), + _descriptor.FieldDescriptor( + name="segment", + full_name="google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults.segment", + index=1, + number=10, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), _descriptor.FieldDescriptor( name="segment_label_annotations", full_name="google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults.segment_label_annotations", - index=1, + index=2, number=2, type=11, cpp_type=10, @@ -1649,10 +1906,28 @@ serialized_options=None, file=DESCRIPTOR, ), + _descriptor.FieldDescriptor( + name="segment_presence_label_annotations", + full_name="google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults.segment_presence_label_annotations", + index=3, + number=23, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), _descriptor.FieldDescriptor( name="shot_label_annotations", full_name="google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults.shot_label_annotations", - index=2, + index=4, number=3, type=11, cpp_type=10, @@ -1667,10 +1942,28 @@ serialized_options=None, file=DESCRIPTOR, ), + _descriptor.FieldDescriptor( + name="shot_presence_label_annotations", + full_name="google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults.shot_presence_label_annotations", + index=5, + number=24, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), _descriptor.FieldDescriptor( 
name="frame_label_annotations", full_name="google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults.frame_label_annotations", - index=3, + index=6, number=4, type=11, cpp_type=10, @@ -1688,7 +1981,7 @@ _descriptor.FieldDescriptor( name="shot_annotations", full_name="google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults.shot_annotations", - index=4, + index=7, number=6, type=11, cpp_type=10, @@ -1706,7 +1999,7 @@ _descriptor.FieldDescriptor( name="explicit_annotation", full_name="google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults.explicit_annotation", - index=5, + index=8, number=7, type=11, cpp_type=10, @@ -1724,7 +2017,7 @@ _descriptor.FieldDescriptor( name="speech_transcriptions", full_name="google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults.speech_transcriptions", - index=6, + index=9, number=11, type=11, cpp_type=10, @@ -1742,7 +2035,7 @@ _descriptor.FieldDescriptor( name="text_annotations", full_name="google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults.text_annotations", - index=7, + index=10, number=12, type=11, cpp_type=10, @@ -1760,7 +2053,7 @@ _descriptor.FieldDescriptor( name="object_annotations", full_name="google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults.object_annotations", - index=8, + index=11, number=14, type=11, cpp_type=10, @@ -1778,7 +2071,7 @@ _descriptor.FieldDescriptor( name="logo_recognition_annotations", full_name="google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults.logo_recognition_annotations", - index=9, + index=12, number=19, type=11, cpp_type=10, @@ -1793,10 +2086,28 @@ serialized_options=None, file=DESCRIPTOR, ), + _descriptor.FieldDescriptor( + name="celebrity_recognition_annotations", + full_name="google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults.celebrity_recognition_annotations", + index=13, + number=21, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + 
containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), _descriptor.FieldDescriptor( name="error", full_name="google.cloud.videointelligence.v1p3beta1.VideoAnnotationResults.error", - index=10, + index=14, number=9, type=11, cpp_type=10, @@ -1820,8 +2131,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3300, - serialized_end=4216, + serialized_start=3878, + serialized_end=5187, ) @@ -1859,8 +2170,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4218, - serialized_end=4335, + serialized_start=5189, + serialized_end=5306, ) @@ -1943,6 +2254,42 @@ serialized_options=None, file=DESCRIPTOR, ), + _descriptor.FieldDescriptor( + name="feature", + full_name="google.cloud.videointelligence.v1p3beta1.VideoAnnotationProgress.feature", + index=4, + number=5, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="segment", + full_name="google.cloud.videointelligence.v1p3beta1.VideoAnnotationProgress.segment", + index=5, + number=6, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), ], extensions=[], nested_types=[], @@ -1952,8 +2299,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4338, - serialized_end=4505, + serialized_start=5309, + serialized_end=5617, ) @@ -1991,8 +2338,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4507, - serialized_end=4626, + serialized_start=5619, + serialized_end=5738, ) @@ -2018,7 +2365,7 @@ containing_type=None, is_extension=False, extension_scope=None, - 
serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2036,7 +2383,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2054,7 +2401,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2072,7 +2419,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2090,7 +2437,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2108,7 +2455,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2126,7 +2473,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2144,7 +2491,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2162,7 +2509,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -2174,8 +2521,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4629, - serialized_end=4976, + serialized_start=5741, + serialized_end=6133, ) @@ -2201,7 +2548,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + 
serialized_options=_b("\340A\001"), file=DESCRIPTOR, ) ], @@ -2213,8 +2560,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4978, - serialized_end=5010, + serialized_start=6135, + serialized_end=6172, ) @@ -2258,7 +2605,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -2270,8 +2617,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5013, - serialized_end=5151, + serialized_start=6175, + serialized_end=6318, ) @@ -2315,7 +2662,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2333,7 +2680,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -2345,8 +2692,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5154, - serialized_end=5291, + serialized_start=6321, + serialized_end=6468, ) @@ -2426,7 +2773,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2444,7 +2791,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -2456,8 +2803,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5294, - serialized_end=5451, + serialized_start=6471, + serialized_end=6638, ) @@ -2513,8 +2860,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5453, - serialized_end=5493, + serialized_start=6640, + serialized_end=6680, ) @@ -2552,8 +2899,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5495, - serialized_end=5597, + serialized_start=6682, + serialized_end=6784, ) @@ 
-2627,8 +2974,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5600, - serialized_end=5775, + serialized_start=6787, + serialized_end=6962, ) @@ -2684,8 +3031,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5778, - serialized_end=5933, + serialized_start=6965, + serialized_end=7120, ) @@ -2741,8 +3088,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5935, - serialized_end=6038, + serialized_start=7122, + serialized_end=7225, ) @@ -2798,8 +3145,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=6041, - serialized_end=6208, + serialized_start=7228, + serialized_end=7395, ) @@ -2917,8 +3264,8 @@ fields=[], ) ], - serialized_start=6211, - serialized_end=6511, + serialized_start=7398, + serialized_end=7698, ) @@ -2992,8 +3339,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=6514, - serialized_end=6746, + serialized_start=7701, + serialized_end=7933, ) @@ -3057,8 +3404,8 @@ fields=[], ) ], - serialized_start=6749, - serialized_end=6914, + serialized_start=7936, + serialized_end=8101, ) @@ -3132,8 +3479,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=6917, - serialized_end=7119, + serialized_start=8104, + serialized_end=8306, ) @@ -3171,8 +3518,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=7121, - serialized_end=7178, + serialized_start=8308, + serialized_end=8365, ) @@ -3210,8 +3557,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=7180, - serialized_end=7237, + serialized_start=8367, + serialized_end=8424, ) @@ -3230,8 +3577,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=7239, - serialized_end=7280, + serialized_start=8426, + serialized_end=8467, ) @@ -3269,8 +3616,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=7282, - serialized_end=7340, + serialized_start=8469, + serialized_end=8527, ) @@ -3289,8 +3636,8 @@ syntax="proto3", 
extension_ranges=[], oneofs=[], - serialized_start=7342, - serialized_end=7373, + serialized_start=8529, + serialized_end=8560, ) @@ -3309,8 +3656,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=7375, - serialized_end=7411, + serialized_start=8562, + serialized_end=8598, ) @@ -3366,8 +3713,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=7413, - serialized_end=7524, + serialized_start=8600, + serialized_end=8711, ) @@ -3459,8 +3806,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=7527, - serialized_end=7922, + serialized_start=8714, + serialized_end=9109, ) @@ -3632,8 +3979,8 @@ fields=[], ) ], - serialized_start=7925, - serialized_end=8833, + serialized_start=9112, + serialized_end=10020, ) _ANNOTATEVIDEOREQUEST.fields_by_name["features"].enum_type = _FEATURE @@ -3689,12 +4036,30 @@ _TRACK.fields_by_name["segment"].message_type = _VIDEOSEGMENT _TRACK.fields_by_name["timestamped_objects"].message_type = _TIMESTAMPEDOBJECT _TRACK.fields_by_name["attributes"].message_type = _DETECTEDATTRIBUTE +_CELEBRITYTRACK_RECOGNIZEDCELEBRITY.fields_by_name[ + "celebrity" +].message_type = _CELEBRITY +_CELEBRITYTRACK_RECOGNIZEDCELEBRITY.containing_type = _CELEBRITYTRACK +_CELEBRITYTRACK.fields_by_name[ + "celebrities" +].message_type = _CELEBRITYTRACK_RECOGNIZEDCELEBRITY +_CELEBRITYTRACK.fields_by_name["face_track"].message_type = _TRACK +_CELEBRITYRECOGNITIONANNOTATION.fields_by_name[ + "celebrity_tracks" +].message_type = _CELEBRITYTRACK +_VIDEOANNOTATIONRESULTS.fields_by_name["segment"].message_type = _VIDEOSEGMENT _VIDEOANNOTATIONRESULTS.fields_by_name[ "segment_label_annotations" ].message_type = _LABELANNOTATION +_VIDEOANNOTATIONRESULTS.fields_by_name[ + "segment_presence_label_annotations" +].message_type = _LABELANNOTATION _VIDEOANNOTATIONRESULTS.fields_by_name[ "shot_label_annotations" ].message_type = _LABELANNOTATION +_VIDEOANNOTATIONRESULTS.fields_by_name[ + "shot_presence_label_annotations" 
+].message_type = _LABELANNOTATION _VIDEOANNOTATIONRESULTS.fields_by_name[ "frame_label_annotations" ].message_type = _LABELANNOTATION @@ -3714,6 +4079,9 @@ _VIDEOANNOTATIONRESULTS.fields_by_name[ "logo_recognition_annotations" ].message_type = _LOGORECOGNITIONANNOTATION +_VIDEOANNOTATIONRESULTS.fields_by_name[ + "celebrity_recognition_annotations" +].message_type = _CELEBRITYRECOGNITIONANNOTATION _VIDEOANNOTATIONRESULTS.fields_by_name[ "error" ].message_type = google_dot_rpc_dot_status__pb2._STATUS @@ -3726,6 +4094,8 @@ _VIDEOANNOTATIONPROGRESS.fields_by_name[ "update_time" ].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_VIDEOANNOTATIONPROGRESS.fields_by_name["feature"].enum_type = _FEATURE +_VIDEOANNOTATIONPROGRESS.fields_by_name["segment"].message_type = _VIDEOSEGMENT _ANNOTATEVIDEOPROGRESS.fields_by_name[ "annotation_progress" ].message_type = _VIDEOANNOTATIONPROGRESS @@ -3889,6 +4259,11 @@ DESCRIPTOR.message_types_by_name["TimestampedObject"] = _TIMESTAMPEDOBJECT DESCRIPTOR.message_types_by_name["Track"] = _TRACK DESCRIPTOR.message_types_by_name["DetectedAttribute"] = _DETECTEDATTRIBUTE +DESCRIPTOR.message_types_by_name["Celebrity"] = _CELEBRITY +DESCRIPTOR.message_types_by_name["CelebrityTrack"] = _CELEBRITYTRACK +DESCRIPTOR.message_types_by_name[ + "CelebrityRecognitionAnnotation" +] = _CELEBRITYRECOGNITIONANNOTATION DESCRIPTOR.message_types_by_name["VideoAnnotationResults"] = _VIDEOANNOTATIONRESULTS DESCRIPTOR.message_types_by_name["AnnotateVideoResponse"] = _ANNOTATEVIDEORESPONSE DESCRIPTOR.message_types_by_name["VideoAnnotationProgress"] = _VIDEOANNOTATIONPROGRESS @@ -3975,11 +4350,11 @@ specified via ``input_uri``. If set, ``input_uri`` should be unset. features: - Requested video annotation features. + Required. Requested video annotation features. video_context: Additional video context and/or feature-specific parameters. output_uri: - Optional location where the output (in JSON format) should be + Optional. 
Location where the output (in JSON format) should be stored. Currently, only `Google Cloud Storage `__ URIs are supported, which must be specified in the following format: @@ -3988,7 +4363,7 @@ ]). For more information, see `Request URIs `__. location_id: - Optional cloud region where annotation should take place. + Optional. Cloud region where annotation should take place. Supported cloud regions: ``us-east1``, ``us-west1``, ``europe- west1``, ``asia-east1``. If no region is specified, a region will be determined based on video file location. @@ -4342,10 +4717,8 @@ dict( DESCRIPTOR=_TIMESTAMPEDOBJECT, __module__="google.cloud.videointelligence_v1p3beta1.proto.video_intelligence_pb2", - __doc__="""For tracking related features, such as LOGO\_RECOGNITION, - FACE\_DETECTION, CELEBRITY\_RECOGNITION, PERSON\_DETECTION. An object at - time\_offset with attributes, and located with - normalized\_bounding\_box. + __doc__="""For tracking related features. An object at time\_offset with + attributes, and located with normalized\_bounding\_box. Attributes: @@ -4413,6 +4786,92 @@ ) _sym_db.RegisterMessage(DetectedAttribute) +Celebrity = _reflection.GeneratedProtocolMessageType( + "Celebrity", + (_message.Message,), + dict( + DESCRIPTOR=_CELEBRITY, + __module__="google.cloud.videointelligence_v1p3beta1.proto.video_intelligence_pb2", + __doc__="""Celebrity definition. + + + Attributes: + name: + The resource name of the celebrity. Have the format ``video- + intelligence/kg-mid`` indicates a celebrity from preloaded + gallery. kg-mid is the id in Google knowledge graph, which is + unique for the celebrity. + display_name: + The celebrity name. + description: + Textual description of additional information about the + celebrity, if applicable. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1p3beta1.Celebrity) + ), +) +_sym_db.RegisterMessage(Celebrity) + +CelebrityTrack = _reflection.GeneratedProtocolMessageType( + "CelebrityTrack", + (_message.Message,), + dict( + RecognizedCelebrity=_reflection.GeneratedProtocolMessageType( + "RecognizedCelebrity", + (_message.Message,), + dict( + DESCRIPTOR=_CELEBRITYTRACK_RECOGNIZEDCELEBRITY, + __module__="google.cloud.videointelligence_v1p3beta1.proto.video_intelligence_pb2", + __doc__="""The recognized celebrity with confidence score. + + + Attributes: + celebrity: + The recognized celebrity. + confidence: + Recognition confidence. Range [0, 1]. + """, + # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1p3beta1.CelebrityTrack.RecognizedCelebrity) + ), + ), + DESCRIPTOR=_CELEBRITYTRACK, + __module__="google.cloud.videointelligence_v1p3beta1.proto.video_intelligence_pb2", + __doc__="""The annotation result of a celebrity face track. RecognizedCelebrity + field could be empty if the face track does not have any matched + celebrities. + + + Attributes: + celebrities: + Top N match of the celebrities for the face in this track. + face_track: + A track of a person's face. + """, + # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1p3beta1.CelebrityTrack) + ), +) +_sym_db.RegisterMessage(CelebrityTrack) +_sym_db.RegisterMessage(CelebrityTrack.RecognizedCelebrity) + +CelebrityRecognitionAnnotation = _reflection.GeneratedProtocolMessageType( + "CelebrityRecognitionAnnotation", + (_message.Message,), + dict( + DESCRIPTOR=_CELEBRITYRECOGNITIONANNOTATION, + __module__="google.cloud.videointelligence_v1p3beta1.proto.video_intelligence_pb2", + __doc__="""Celebrity recognition annotation per video. + + + Attributes: + celebrity_tracks: + The tracks detected from the input video, including recognized + celebrities and other detected faces in the video. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1p3beta1.CelebrityRecognitionAnnotation) + ), +) +_sym_db.RegisterMessage(CelebrityRecognitionAnnotation) + VideoAnnotationResults = _reflection.GeneratedProtocolMessageType( "VideoAnnotationResults", (_message.Message,), @@ -4426,12 +4885,32 @@ input_uri: Video file location in `Google Cloud Storage `__. + segment: + Video segment on which the annotation is run. segment_label_annotations: - Label annotations on video level or user specified segment - level. There is exactly one element for each unique label. + Topical label annotations on video level or user specified + segment level. There is exactly one element for each unique + label. + segment_presence_label_annotations: + Presence label annotations on video level or user specified + segment level. There is exactly one element for each unique + label. Compared to the existing topical + ``segment_label_annotations``, this field presents more fine- + grained, segment-level labels detected in video content and is + made available only when the client sets + ``LabelDetectionConfig.model`` to "builtin/latest" in the + request. shot_label_annotations: - Label annotations on shot level. There is exactly one element - for each unique label. + Topical label annotations on shot level. There is exactly one + element for each unique label. + shot_presence_label_annotations: + Presence label annotations on shot level. There is exactly one + element for each unique label. Compared to the existing + topical ``shot_label_annotations``, this field presents more + fine-grained, shot-level labels detected in video content and + is made available only when the client sets + ``LabelDetectionConfig.model`` to "builtin/latest" in the + request. frame_label_annotations: Label annotations on frame level. There is exactly one element for each unique label. 
@@ -4450,6 +4929,8 @@ logo_recognition_annotations: Annotations for list of logos detected, tracked and recognized in video. + celebrity_recognition_annotations: + Celebrity recognition annotations. error: If set, indicates an error. Note that for a single ``AnnotateVideoRequest`` some videos may succeed and some may @@ -4501,6 +4982,12 @@ Time when the request was received. update_time: Time of the most recent update. + feature: + Specifies which feature is being tracked if the request + contains more than one features. + segment: + Specifies which segment is being tracked if the request + contains more than one segments. """, # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1p3beta1.VideoAnnotationProgress) ), @@ -4539,13 +5026,13 @@ Attributes: language_code: - *Required* The language of the supplied audio as a `BCP-47 - `__ language - tag. Example: "en-US". See `Language Support + Required. *Required* The language of the supplied audio as a + `BCP-47 `__ + language tag. Example: "en-US". See `Language Support `__ for a list of the currently supported language codes. max_alternatives: - *Optional* Maximum number of recognition hypotheses to be + Optional. Maximum number of recognition hypotheses to be returned. Specifically, the maximum number of ``SpeechRecognitionAlternative`` messages within each ``SpeechTranscription``. The server may return fewer than @@ -4553,16 +5040,16 @@ of ``0`` or ``1`` will return a maximum of one. If omitted, will return a maximum of one. filter_profanity: - *Optional* If set to ``true``, the server will attempt to + Optional. If set to ``true``, the server will attempt to filter out profanities, replacing all but the initial character in each filtered word with asterisks, e.g. "f\*\*\*". If set to ``false`` or omitted, profanities won't be filtered out. speech_contexts: - *Optional* A means to provide context to assist the speech + Optional. A means to provide context to assist the speech recognition. 
enable_automatic_punctuation: - *Optional* If 'true', adds punctuation to recognition result + Optional. If 'true', adds punctuation to recognition result hypotheses. This feature is only available in select languages. Setting this for requests in other languages has no effect at all. The default 'false' value does not add @@ -4571,11 +5058,11 @@ users. In the future this may be exclusively available as a premium feature." audio_tracks: - *Optional* For file formats, such as MXF or MKV, supporting + Optional. For file formats, such as MXF or MKV, supporting multiple audio tracks, specify up to two tracks. Default: track 0. enable_speaker_diarization: - *Optional* If 'true', enables speaker detection for each + Optional. If 'true', enables speaker detection for each recognized word in the top alternative of the recognition result using a speaker\_tag provided in the WordInfo. Note: When this is true, we send all the words from the beginning of @@ -4584,13 +5071,13 @@ as our models learn to identify the speakers in the conversation over time. diarization_speaker_count: - *Optional* If set, specifies the estimated number of speakers + Optional. If set, specifies the estimated number of speakers in the conversation. If not set, defaults to '2'. Ignored unless enable\_speaker\_diarization is set to true. enable_word_confidence: - *Optional* If ``true``, the top result includes a list of - words and the confidence for those words. If ``false``, no - word-level confidence information is returned. The default is + Optional. If ``true``, the top result includes a list of words + and the confidence for those words. If ``false``, no word- + level confidence information is returned. The default is ``false``. """, # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1p3beta1.SpeechTranscriptionConfig) @@ -4610,7 +5097,7 @@ Attributes: phrases: - *Optional* A list of strings containing words and phrases + Optional. 
A list of strings containing words and phrases "hints" so that the speech recognition is more likely to recognize them. This can be used to improve the accuracy for specific words and phrases, for example, if specific commands @@ -4664,15 +5151,18 @@ transcript: Transcript text representing the words that the user spoke. confidence: - The confidence estimate between 0.0 and 1.0. A higher number - indicates an estimated greater likelihood that the recognized - words are correct. This field is typically provided only for - the top hypothesis, and only for ``is_final=true`` results. - Clients should not rely on the ``confidence`` field as it is - not guaranteed to be accurate or consistent. The default of - 0.0 is a sentinel value indicating ``confidence`` was not set. + Output only. The confidence estimate between 0.0 and 1.0. A + higher number indicates an estimated greater likelihood that + the recognized words are correct. This field is set only for + the top alternative. This field is not guaranteed to be + accurate and users should not rely on it to be always + provided. The default of 0.0 is a sentinel value indicating + ``confidence`` was not set. words: - A list of word-specific information for each recognized word. + Output only. A list of word-specific information for each + recognized word. Note: When ``enable_speaker_diarization`` is + true, you will see all the words from the beginning of the + audio. """, # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1p3beta1.SpeechRecognitionAlternative) ), @@ -5001,7 +5491,7 @@ dict( DESCRIPTOR=_STREAMINGAUTOMLCLASSIFICATIONCONFIG, __module__="google.cloud.videointelligence_v1p3beta1.proto.video_intelligence_pb2", - __doc__="""Config for AUTOML\_CLASSIFICATION in streaming mode. + __doc__="""Config for STREAMING\_AUTOML\_CLASSIFICATION. 
Attributes: @@ -5020,7 +5510,7 @@ dict( DESCRIPTOR=_STREAMINGAUTOMLOBJECTTRACKINGCONFIG, __module__="google.cloud.videointelligence_v1p3beta1.proto.video_intelligence_pb2", - __doc__="""Config for AUTOML\_OBJECT\_TRACKING in streaming mode. + __doc__="""Config for STREAMING\_AUTOML\_OBJECT\_TRACKING. Attributes: @@ -5039,9 +5529,7 @@ dict( DESCRIPTOR=_STREAMINGEXPLICITCONTENTDETECTIONCONFIG, __module__="google.cloud.videointelligence_v1p3beta1.proto.video_intelligence_pb2", - __doc__="""Config for EXPLICIT\_CONTENT\_DETECTION in streaming mode. - - No customized config support. + __doc__="""Config for STREAMING\_EXPLICIT\_CONTENT\_DETECTION. """, # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1p3beta1.StreamingExplicitContentDetectionConfig) ), @@ -5054,7 +5542,7 @@ dict( DESCRIPTOR=_STREAMINGLABELDETECTIONCONFIG, __module__="google.cloud.videointelligence_v1p3beta1.proto.video_intelligence_pb2", - __doc__="""Config for LABEL\_DETECTION in streaming mode. + __doc__="""Config for STREAMING\_LABEL\_DETECTION. Attributes: @@ -5075,8 +5563,6 @@ DESCRIPTOR=_STREAMINGOBJECTTRACKINGCONFIG, __module__="google.cloud.videointelligence_v1p3beta1.proto.video_intelligence_pb2", __doc__="""Config for STREAMING\_OBJECT\_TRACKING. - - No customized config support. """, # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1p3beta1.StreamingObjectTrackingConfig) ), @@ -5089,9 +5575,7 @@ dict( DESCRIPTOR=_STREAMINGSHOTCHANGEDETECTIONCONFIG, __module__="google.cloud.videointelligence_v1p3beta1.proto.video_intelligence_pb2", - __doc__="""Config for SHOT\_CHANGE\_DETECTION in streaming mode. - - No customized config support. + __doc__="""Config for STREAMING\_SHOT\_CHANGE\_DETECTION. 
""", # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1p3beta1.StreamingShotChangeDetectionConfig) ), @@ -5192,15 +5676,40 @@ DESCRIPTOR._options = None +_ANNOTATEVIDEOREQUEST.fields_by_name["features"]._options = None +_ANNOTATEVIDEOREQUEST.fields_by_name["output_uri"]._options = None +_ANNOTATEVIDEOREQUEST.fields_by_name["location_id"]._options = None +_TIMESTAMPEDOBJECT.fields_by_name["attributes"]._options = None +_TRACK.fields_by_name["attributes"]._options = None +_TRACK.fields_by_name["confidence"]._options = None +_SPEECHTRANSCRIPTIONCONFIG.fields_by_name["language_code"]._options = None +_SPEECHTRANSCRIPTIONCONFIG.fields_by_name["max_alternatives"]._options = None +_SPEECHTRANSCRIPTIONCONFIG.fields_by_name["filter_profanity"]._options = None +_SPEECHTRANSCRIPTIONCONFIG.fields_by_name["speech_contexts"]._options = None +_SPEECHTRANSCRIPTIONCONFIG.fields_by_name[ + "enable_automatic_punctuation" +]._options = None +_SPEECHTRANSCRIPTIONCONFIG.fields_by_name["audio_tracks"]._options = None +_SPEECHTRANSCRIPTIONCONFIG.fields_by_name["enable_speaker_diarization"]._options = None +_SPEECHTRANSCRIPTIONCONFIG.fields_by_name["diarization_speaker_count"]._options = None +_SPEECHTRANSCRIPTIONCONFIG.fields_by_name["enable_word_confidence"]._options = None +_SPEECHCONTEXT.fields_by_name["phrases"]._options = None +_SPEECHTRANSCRIPTION.fields_by_name["language_code"]._options = None +_SPEECHRECOGNITIONALTERNATIVE.fields_by_name["confidence"]._options = None +_SPEECHRECOGNITIONALTERNATIVE.fields_by_name["words"]._options = None +_WORDINFO.fields_by_name["confidence"]._options = None +_WORDINFO.fields_by_name["speaker_tag"]._options = None _VIDEOINTELLIGENCESERVICE = _descriptor.ServiceDescriptor( name="VideoIntelligenceService", full_name="google.cloud.videointelligence.v1p3beta1.VideoIntelligenceService", file=DESCRIPTOR, index=0, - serialized_options=None, - serialized_start=9548, - serialized_end=9726, + serialized_options=_b( + "\312A 
videointelligence.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" + ), + serialized_start=10762, + serialized_end=11096, methods=[ _descriptor.MethodDescriptor( name="AnnotateVideo", @@ -5210,7 +5719,7 @@ input_type=_ANNOTATEVIDEOREQUEST, output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, serialized_options=_b( - '\202\323\344\223\002\037"\032/v1p3beta1/videos:annotate:\001*' + '\202\323\344\223\002\037"\032/v1p3beta1/videos:annotate:\001*\332A\022input_uri,features\312A.\n\025AnnotateVideoResponse\022\025AnnotateVideoProgress' ), ) ], @@ -5225,9 +5734,11 @@ full_name="google.cloud.videointelligence.v1p3beta1.StreamingVideoIntelligenceService", file=DESCRIPTOR, index=1, - serialized_options=None, - serialized_start=9729, - serialized_end=9942, + serialized_options=_b( + "\312A videointelligence.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" + ), + serialized_start=11099, + serialized_end=11400, methods=[ _descriptor.MethodDescriptor( name="StreamingAnnotateVideo", diff --git a/videointelligence/google/cloud/videointelligence_v1p3beta1/proto/video_intelligence_pb2_grpc.py b/videointelligence/google/cloud/videointelligence_v1p3beta1/proto/video_intelligence_pb2_grpc.py index d410892780b3..4e25eb647900 100644 --- a/videointelligence/google/cloud/videointelligence_v1p3beta1/proto/video_intelligence_pb2_grpc.py +++ b/videointelligence/google/cloud/videointelligence_v1p3beta1/proto/video_intelligence_pb2_grpc.py @@ -57,7 +57,7 @@ def add_VideoIntelligenceServiceServicer_to_server(servicer, server): class StreamingVideoIntelligenceServiceStub(object): - """Service that implements Google Cloud Video Intelligence Streaming API. + """Service that implements streaming Google Cloud Video Intelligence API. """ def __init__(self, channel): @@ -74,7 +74,7 @@ def __init__(self, channel): class StreamingVideoIntelligenceServiceServicer(object): - """Service that implements Google Cloud Video Intelligence Streaming API. 
+ """Service that implements streaming Google Cloud Video Intelligence API. """ def StreamingAnnotateVideo(self, request_iterator, context): diff --git a/videointelligence/synth.metadata b/videointelligence/synth.metadata index 1a3cca9bbf71..31ed0d4141dd 100644 --- a/videointelligence/synth.metadata +++ b/videointelligence/synth.metadata @@ -1,40 +1,30 @@ { - "updateTime": "2019-08-07T12:41:23.743321Z", + "updateTime": "2019-11-06T13:41:12.139653Z", "sources": [ { "generator": { "name": "artman", - "version": "0.32.1", - "dockerImage": "googleapis/artman@sha256:a684d40ba9a4e15946f5f2ca6b4bd9fe301192f522e9de4fff622118775f309b" + "version": "0.41.0", + "dockerImage": "googleapis/artman@sha256:75b38a3b073a7b243545f2332463096624c802bb1e56b8cb6f22ba1ecd325fa9" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "6d29882872298c8bfbaef33bd69bfca275c4d2eb", - "internalRef": "262019251" + "sha": "5691fcb7c1a926b52577aa1834f31d9c50efda54", + "internalRef": "278731899" } }, { "template": { "name": "python_library", "origin": "synthtool.gcp", - "version": "2019.5.2" + "version": "2019.10.17" } } ], "destinations": [ - { - "client": { - "source": "googleapis", - "apiName": "videointelligence", - "apiVersion": "v1beta1", - "language": "python", - "generator": "gapic", - "config": "google/cloud/videointelligence/artman_videointelligence_v1beta1.yaml" - } - }, { "client": { "source": "googleapis", diff --git a/videointelligence/synth.py b/videointelligence/synth.py index 3e5d8389829c..8cb0ac1d98cb 100644 --- a/videointelligence/synth.py +++ b/videointelligence/synth.py @@ -20,7 +20,7 @@ gapic = gcp.GAPICGenerator() common = gcp.CommonTemplates() -versions = ["v1beta1", "v1beta2", "v1p1beta1", "v1p2beta1", "v1p3beta1", "v1"] +versions = ["v1beta2", "v1p1beta1", "v1p2beta1", "v1p3beta1", "v1"] # ---------------------------------------------------------------------------- diff --git a/videointelligence/tests/system.py 
b/videointelligence/tests/system.py index a8ad0a9b29bf..a73e4edc1634 100644 --- a/videointelligence/tests/system.py +++ b/videointelligence/tests/system.py @@ -11,32 +11,40 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - """System tests for VideoIntelligence API.""" import json import os import requests -import time import unittest +from google.auth.transport import requests as goog_auth_requests from google.cloud import videointelligence -from google.cloud.videointelligence_v1 import enums +from google.oauth2 import service_account + +CLOUD_PLATFORM_SCOPE = "https://www.googleapis.com/auth/cloud-platform" +CREDENTIALS_FILE = os.environ.get("GOOGLE_APPLICATION_CREDENTIALS") +OUTSIDE_BUCKET = os.environ.get("GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_BUCKET") +INSIDE_BUCKET = os.environ.get("GOOGLE_CLOUD_TESTS_VPCSC_INSIDE_PERIMETER_BUCKET") +IS_INSIDE_VPCSC = os.environ.get("GOOGLE_CLOUD_TESTS_IN_VPCSC") -PROJECT_NUMBER = os.environ.get("PROJECT_NUMBER") -OUTSIDE_PROJECT_API_KEY = os.environ.get( - "GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT_API_KEY" -) -OUTSIDE_IP = os.environ.get("GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_IP") -INSIDE_IP = os.environ.get("GOOGLE_CLOUD_TESTS_VPCSC_INSIDE_IP") + +def get_access_token(): + """Returns an access token. + + Generates access tokens using the provided service account key file. 
+ """ + creds = service_account.Credentials.from_service_account_file( + CREDENTIALS_FILE, scopes=[CLOUD_PLATFORM_SCOPE] + ) + with requests.Session() as session: + creds.refresh(goog_auth_requests.Request(session=session)) + return creds.token class VideoIntelligenceSystemTestBase(unittest.TestCase): client = None - def setUp(self): - self.input_uri = "gs://cloud-samples-data/video/cat.mp4" - def setUpModule(): VideoIntelligenceSystemTestBase.client = ( @@ -44,90 +52,58 @@ def setUpModule(): ) -class TestVideoIntelligenceClient(VideoIntelligenceSystemTestBase): - def test_annotate_video(self): - features_element = enums.Feature.LABEL_DETECTION - features = [features_element] - response = self.client.annotate_video( - input_uri=self.input_uri, features=features - ) - - # Wait for the operation to complete. - # Long timeout value warranted due to https://github.com/grpc/grpc/issues/19173 - lro_timeout_seconds = 180 - start_time = time.time() - cnt = 0 - while not response.done() and (time.time() - start_time) < lro_timeout_seconds: - time.sleep(1) - cnt += 1 - if not response.done(): - self.fail( - "wait for operation timed out after {lro_timeout_seconds} seconds".format( - lro_timeout_seconds=lro_timeout_seconds - ) - ) - - result = response.result() - annotations = result.annotation_results[0] - assert len(annotations.segment_label_annotations) > 0 - - @unittest.skipUnless( - OUTSIDE_PROJECT_API_KEY, - "GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT_API_KEY not set in environment.", + CREDENTIALS_FILE, "GOOGLE_APPLICATION_CREDENTIALS not set in environment." ) class TestVideoIntelligenceClientVpcSc(VideoIntelligenceSystemTestBase): - # Tests to verify VideoIntelligence service requests blocked when trying to access resources outside of a secure perimeter. + # Tests to verify VideoIntelligence service requests blocked when trying to + # access resources outside of a secure perimeter. 
def setUp(self): VideoIntelligenceSystemTestBase.setUp(self) # api-endpoint - self.url = "https://videointelligence.googleapis.com/v1/videos:annotate?key={}".format( - OUTSIDE_PROJECT_API_KEY - ) - self.body = { - "input_uri": self.input_uri, - "features": ["LABEL_DETECTION"], - "location_id": "us-west1", - } + self.url = "https://videointelligence.googleapis.com/v1/videos:annotate" + self.body = {"features": ["LABEL_DETECTION"], "location_id": "us-west1"} - @unittest.skipUnless(PROJECT_NUMBER, "PROJECT_NUMBER not set in environment.") @unittest.skipUnless( - OUTSIDE_IP, "GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_IP not set in environment." + OUTSIDE_BUCKET, + "GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_BUCKET not set in environment.", ) - def test_outside_ip_address_blocked(self): + @unittest.skipUnless( + IS_INSIDE_VPCSC, "GOOGLE_CLOUD_TESTS_IN_VPCSC not set in environment." + ) + def test_outside_perimeter_blocked(self): headers = { + "Authorization": "Bearer " + get_access_token(), "Content-Type": "application/json", - "X-User-IP": OUTSIDE_IP, - "X-Google-GFE-Cloud-Client-Network-Project-Number": PROJECT_NUMBER, } + self.body["input_uri"] = "gs://{bucket}/cat.mp4".format(bucket=OUTSIDE_BUCKET) r = requests.post(url=self.url, data=json.dumps(self.body), headers=headers) - outside_project_operation = json.loads(r.text) - print(outside_project_operation) + resp = json.loads(r.text) + print(resp) # Assert it returns permission denied from VPC SC - self.assertEqual(outside_project_operation["error"]["code"], 403) - self.assertEqual( - outside_project_operation["error"]["status"], "PERMISSION_DENIED" - ) - self.assertEqual( - outside_project_operation["error"]["details"][0]["violations"][0]["type"], - "VPC_SERVICE_CONTROLS", - ) - self.assertEqual( - outside_project_operation["error"]["message"], - "Request is prohibited by organization's policy", - ) + self.assertEqual(resp["error"]["code"], 403) + self.assertEqual(resp["error"]["status"], "PERMISSION_DENIED") - 
@unittest.skipUnless(PROJECT_NUMBER, "PROJECT_NUMBER not set in environment.") @unittest.skipUnless( - INSIDE_IP, "GOOGLE_CLOUD_TESTS_VPCSC_INSIDE_IP not set in environment." + INSIDE_BUCKET, + "GOOGLE_CLOUD_TESTS_VPCSC_INSIDE_PERIMETER_BUCKET not set in environment.", + ) + @unittest.skipUnless( + IS_INSIDE_VPCSC, "GOOGLE_CLOUD_TESTS_IN_VPCSC not set in environment." ) - def test_inside_ip_address_allowed(self): + def test_inside_perimeter_allowed(self): headers = { + "Authorization": "Bearer " + get_access_token(), "Content-Type": "application/json", - "X-User-IP": INSIDE_IP, - "X-Google-GFE-Cloud-Client-Network-Project-Number": PROJECT_NUMBER, } + self.body["input_uri"] = "gs://{bucket}/cat.mp4".format(bucket=INSIDE_BUCKET) r = requests.post(url=self.url, data=json.dumps(self.body), headers=headers) operation = json.loads(r.text) - # Assert it returns non-empty operation name. - self.assertNotEqual(operation["name"], "") + print(operation) + + get_op_url = "https://videointelligence.googleapis.com/v1/" + operation["name"] + get_op = requests.get(url=get_op_url, headers=headers) + get_op_resp = json.loads(get_op.text) + print(get_op_resp) + # Assert that we do not get an error. 
+ self.assertEqual(get_op_resp["name"], operation["name"]) diff --git a/videointelligence/tests/unit/gapic/v1/test_video_intelligence_service_client_v1.py b/videointelligence/tests/unit/gapic/v1/test_video_intelligence_service_client_v1.py index fca6c1e165bd..d34b37cf76b1 100644 --- a/videointelligence/tests/unit/gapic/v1/test_video_intelligence_service_client_v1.py +++ b/videointelligence/tests/unit/gapic/v1/test_video_intelligence_service_client_v1.py @@ -83,17 +83,17 @@ def test_annotate_video(self): client = videointelligence_v1.VideoIntelligenceServiceClient() # Setup Request - input_uri = "gs://cloud-samples-data/video/cat.mp4" features_element = enums.Feature.LABEL_DETECTION features = [features_element] + input_uri = "gs://cloud-samples-data/video/cat.mp4" - response = client.annotate_video(input_uri=input_uri, features=features) + response = client.annotate_video(features, input_uri=input_uri) result = response.result() assert expected_response == result assert len(channel.requests) == 1 expected_request = video_intelligence_pb2.AnnotateVideoRequest( - input_uri=input_uri, features=features + features=features, input_uri=input_uri ) actual_request = channel.requests[0][1] assert expected_request == actual_request @@ -114,10 +114,10 @@ def test_annotate_video_exception(self): client = videointelligence_v1.VideoIntelligenceServiceClient() # Setup Request - input_uri = "gs://cloud-samples-data/video/cat.mp4" features_element = enums.Feature.LABEL_DETECTION features = [features_element] + input_uri = "gs://cloud-samples-data/video/cat.mp4" - response = client.annotate_video(input_uri=input_uri, features=features) + response = client.annotate_video(features, input_uri=input_uri) exception = response.exception() assert exception.errors[0] == error diff --git a/vision/docs/conf.py b/vision/docs/conf.py index d175e41d7fc9..4fc985d1f432 100644 --- a/vision/docs/conf.py +++ b/vision/docs/conf.py @@ -344,7 +344,7 @@ "google-gax": 
("https://gax-python.readthedocs.io/en/latest/", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://2.python-requests.org/en/master/", None), + "requests": ("https://requests.kennethreitz.org/en/master/", None), "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } diff --git a/vision/google/cloud/vision_v1/gapic/image_annotator_client.py b/vision/google/cloud/vision_v1/gapic/image_annotator_client.py index efe0924da129..2b726e747ed5 100644 --- a/vision/google/cloud/vision_v1/gapic/image_annotator_client.py +++ b/vision/google/cloud/vision_v1/gapic/image_annotator_client.py @@ -210,7 +210,7 @@ def batch_annotate_images( >>> response = client.batch_annotate_images(requests) Args: - requests (list[Union[dict, ~google.cloud.vision_v1.types.AnnotateImageRequest]]): Individual image annotation requests for this batch. + requests (list[Union[dict, ~google.cloud.vision_v1.types.AnnotateImageRequest]]): Required. Individual image annotation requests for this batch. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.vision_v1.types.AnnotateImageRequest` @@ -302,7 +302,7 @@ def batch_annotate_files( >>> response = client.batch_annotate_files(requests) Args: - requests (list[Union[dict, ~google.cloud.vision_v1.types.AnnotateFileRequest]]): The list of file annotation requests. Right now we support only one + requests (list[Union[dict, ~google.cloud.vision_v1.types.AnnotateFileRequest]]): Required. The list of file annotation requests. Right now we support only one AnnotateFileRequest in BatchAnnotateFilesRequest. 
If a dict is provided, it must be of the same form as the protobuf @@ -411,7 +411,7 @@ def async_batch_annotate_images( >>> metadata = response.metadata() Args: - requests (list[Union[dict, ~google.cloud.vision_v1.types.AnnotateImageRequest]]): Individual image annotation requests for this batch. + requests (list[Union[dict, ~google.cloud.vision_v1.types.AnnotateImageRequest]]): Required. Individual image annotation requests for this batch. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.vision_v1.types.AnnotateImageRequest` @@ -523,7 +523,7 @@ def async_batch_annotate_files( >>> metadata = response.metadata() Args: - requests (list[Union[dict, ~google.cloud.vision_v1.types.AsyncAnnotateFileRequest]]): Individual async file annotation requests for this batch. + requests (list[Union[dict, ~google.cloud.vision_v1.types.AsyncAnnotateFileRequest]]): Required. Individual async file annotation requests for this batch. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.vision_v1.types.AsyncAnnotateFileRequest` diff --git a/vision/google/cloud/vision_v1/gapic/product_search_client.py b/vision/google/cloud/vision_v1/gapic/product_search_client.py index 4849f0cd6c7f..6cb156400e18 100644 --- a/vision/google/cloud/vision_v1/gapic/product_search_client.py +++ b/vision/google/cloud/vision_v1/gapic/product_search_client.py @@ -277,10 +277,10 @@ def create_product_set( >>> response = client.create_product_set(parent, product_set) Args: - parent (str): The project in which the ProductSet should be created. + parent (str): Required. The project in which the ProductSet should be created. Format is ``projects/PROJECT_ID/locations/LOC_ID``. - product_set (Union[dict, ~google.cloud.vision_v1.types.ProductSet]): The ProductSet to create. + product_set (Union[dict, ~google.cloud.vision_v1.types.ProductSet]): Required. The ProductSet to create. 
If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.vision_v1.types.ProductSet` @@ -376,7 +376,7 @@ def list_product_sets( ... pass Args: - parent (str): The project from which ProductSets should be listed. + parent (str): Required. The project from which ProductSets should be listed. Format is ``projects/PROJECT_ID/locations/LOC_ID``. page_size (int): The maximum number of resources contained in the @@ -472,7 +472,7 @@ def get_product_set( >>> response = client.get_product_set(name) Args: - name (str): Resource name of the ProductSet to get. + name (str): Required. Resource name of the ProductSet to get. Format is: ``projects/PROJECT_ID/locations/LOG_ID/productSets/PRODUCT_SET_ID`` @@ -553,7 +553,7 @@ def update_product_set( >>> response = client.update_product_set(product_set) Args: - product_set (Union[dict, ~google.cloud.vision_v1.types.ProductSet]): The ProductSet resource which replaces the one on the server. + product_set (Union[dict, ~google.cloud.vision_v1.types.ProductSet]): Required. The ProductSet resource which replaces the one on the server. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.vision_v1.types.ProductSet` @@ -636,7 +636,7 @@ def delete_product_set( >>> client.delete_product_set(name) Args: - name (str): Resource name of the ProductSet to delete. + name (str): Required. Resource name of the ProductSet to delete. Format is: ``projects/PROJECT_ID/locations/LOC_ID/productSets/PRODUCT_SET_ID`` @@ -718,10 +718,10 @@ def create_product( >>> response = client.create_product(parent, product) Args: - parent (str): The project in which the Product should be created. + parent (str): Required. The project in which the Product should be created. Format is ``projects/PROJECT_ID/locations/LOC_ID``. - product (Union[dict, ~google.cloud.vision_v1.types.Product]): The product to create. + product (Union[dict, ~google.cloud.vision_v1.types.Product]): Required. 
The product to create. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.vision_v1.types.Product` @@ -817,7 +817,8 @@ def list_products( ... pass Args: - parent (str): The project OR ProductSet from which Products should be listed. + parent (str): Required. The project OR ProductSet from which Products should be + listed. Format: ``projects/PROJECT_ID/locations/LOC_ID`` page_size (int): The maximum number of resources contained in the @@ -913,7 +914,7 @@ def get_product( >>> response = client.get_product(name) Args: - name (str): Resource name of the Product to get. + name (str): Required. Resource name of the Product to get. Format is: ``projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID`` retry (Optional[google.api_core.retry.Retry]): A retry object used @@ -1000,7 +1001,7 @@ def update_product( >>> response = client.update_product(product) Args: - product (Union[dict, ~google.cloud.vision_v1.types.Product]): The Product resource which replaces the one on the server. + product (Union[dict, ~google.cloud.vision_v1.types.Product]): Required. The Product resource which replaces the one on the server. product.name is immutable. If a dict is provided, it must be of the same form as the protobuf @@ -1085,7 +1086,7 @@ def delete_product( >>> client.delete_product(name) Args: - name (str): Resource name of product to delete. + name (str): Required. Resource name of product to delete. Format is: ``projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID`` retry (Optional[google.api_core.retry.Retry]): A retry object used @@ -1178,10 +1179,11 @@ def create_reference_image( >>> response = client.create_reference_image(parent, reference_image) Args: - parent (str): Resource name of the product in which to create the reference image. + parent (str): Required. Resource name of the product in which to create the reference + image. Format is ``projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID``. 
- reference_image (Union[dict, ~google.cloud.vision_v1.types.ReferenceImage]): The reference image to create. + reference_image (Union[dict, ~google.cloud.vision_v1.types.ReferenceImage]): Required. The reference image to create. If an image ID is specified, it is ignored. If a dict is provided, it must be of the same form as the protobuf @@ -1268,7 +1270,7 @@ def delete_reference_image( >>> client.delete_reference_image(name) Args: - name (str): The resource name of the reference image to delete. + name (str): Required. The resource name of the reference image to delete. Format is: @@ -1357,7 +1359,7 @@ def list_reference_images( ... pass Args: - parent (str): Resource name of the product containing the reference images. + parent (str): Required. Resource name of the product containing the reference images. Format is ``projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID``. page_size (int): The maximum number of resources contained in the @@ -1453,7 +1455,7 @@ def get_reference_image( >>> response = client.get_reference_image(name) Args: - name (str): The resource name of the ReferenceImage to get. + name (str): Required. The resource name of the ReferenceImage to get. Format is: @@ -1535,11 +1537,12 @@ def add_product_to_product_set( >>> client.add_product_to_product_set(name, product) Args: - name (str): The resource name for the ProductSet to modify. + name (str): Required. The resource name for the ProductSet to modify. Format is: ``projects/PROJECT_ID/locations/LOC_ID/productSets/PRODUCT_SET_ID`` - product (str): The resource name for the Product to be added to this ProductSet. + product (str): Required. The resource name for the Product to be added to this + ProductSet. 
Format is: ``projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID`` retry (Optional[google.api_core.retry.Retry]): A retry object used @@ -1611,11 +1614,12 @@ def remove_product_from_product_set( >>> client.remove_product_from_product_set(name, product) Args: - name (str): The resource name for the ProductSet to modify. + name (str): Required. The resource name for the ProductSet to modify. Format is: ``projects/PROJECT_ID/locations/LOC_ID/productSets/PRODUCT_SET_ID`` - product (str): The resource name for the Product to be removed from this ProductSet. + product (str): Required. The resource name for the Product to be removed from this + ProductSet. Format is: ``projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID`` retry (Optional[google.api_core.retry.Retry]): A retry object used @@ -1707,7 +1711,7 @@ def list_products_in_product_set( ... pass Args: - name (str): The ProductSet resource for which to retrieve Products. + name (str): Required. The ProductSet resource for which to retrieve Products. Format is: ``projects/PROJECT_ID/locations/LOC_ID/productSets/PRODUCT_SET_ID`` @@ -1825,10 +1829,10 @@ def import_product_sets( >>> metadata = response.metadata() Args: - parent (str): The project in which the ProductSets should be imported. + parent (str): Required. The project in which the ProductSets should be imported. Format is ``projects/PROJECT_ID/locations/LOC_ID``. - input_config (Union[dict, ~google.cloud.vision_v1.types.ImportProductSetsInputConfig]): The input content for the list of requests. + input_config (Union[dict, ~google.cloud.vision_v1.types.ImportProductSetsInputConfig]): Required. The input content for the list of requests. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.vision_v1.types.ImportProductSetsInputConfig` @@ -1943,7 +1947,8 @@ def purge_products( >>> metadata = response.metadata() Args: - parent (str): The project and location in which the Products should be deleted. 
+ parent (str): Required. The project and location in which the Products should be + deleted. Format is ``projects/PROJECT_ID/locations/LOC_ID``. product_set_purge_config (Union[dict, ~google.cloud.vision_v1.types.ProductSetPurgeConfig]): Specify which ProductSet contains the Products to be deleted. diff --git a/vision/google/cloud/vision_v1/proto/image_annotator.proto b/vision/google/cloud/vision_v1/proto/image_annotator.proto index e4ca1539350a..d2588604605a 100644 --- a/vision/google/cloud/vision_v1/proto/image_annotator.proto +++ b/vision/google/cloud/vision_v1/proto/image_annotator.proto @@ -19,6 +19,7 @@ package google.cloud.vision.v1; import "google/api/annotations.proto"; import "google/api/client.proto"; +import "google/api/field_behavior.proto"; import "google/cloud/vision/v1/geometry.proto"; import "google/cloud/vision/v1/product_search.proto"; import "google/cloud/vision/v1/text_annotation.proto"; @@ -47,8 +48,7 @@ service ImageAnnotator { "https://www.googleapis.com/auth/cloud-vision"; // Run image detection and annotation for a batch of images. - rpc BatchAnnotateImages(BatchAnnotateImagesRequest) - returns (BatchAnnotateImagesResponse) { + rpc BatchAnnotateImages(BatchAnnotateImagesRequest) returns (BatchAnnotateImagesResponse) { option (google.api.http) = { post: "/v1/images:annotate" body: "*" @@ -61,6 +61,7 @@ service ImageAnnotator { body: "*" } }; + option (google.api.method_signature) = "requests"; } // Service that performs image detection and annotation for a batch of files. @@ -70,8 +71,7 @@ service ImageAnnotator { // AnnotateFileRequest.pages) frames (gif) or pages (pdf or tiff) from each // file provided and perform detection and annotation for each image // extracted. 
- rpc BatchAnnotateFiles(BatchAnnotateFilesRequest) - returns (BatchAnnotateFilesResponse) { + rpc BatchAnnotateFiles(BatchAnnotateFilesRequest) returns (BatchAnnotateFilesResponse) { option (google.api.http) = { post: "/v1/files:annotate" body: "*" @@ -84,6 +84,7 @@ service ImageAnnotator { body: "*" } }; + option (google.api.method_signature) = "requests"; } // Run asynchronous image detection and annotation for a list of images. @@ -95,8 +96,7 @@ service ImageAnnotator { // // This service will write image annotation outputs to json files in customer // GCS bucket, each json file containing BatchAnnotateImagesResponse proto. - rpc AsyncBatchAnnotateImages(AsyncBatchAnnotateImagesRequest) - returns (google.longrunning.Operation) { + rpc AsyncBatchAnnotateImages(AsyncBatchAnnotateImagesRequest) returns (google.longrunning.Operation) { option (google.api.http) = { post: "/v1/images:asyncBatchAnnotate" body: "*" @@ -109,6 +109,11 @@ service ImageAnnotator { body: "*" } }; + option (google.api.method_signature) = "requests,output_config"; + option (google.longrunning.operation_info) = { + response_type: "AsyncBatchAnnotateImagesResponse" + metadata_type: "OperationMetadata" + }; } // Run asynchronous image detection and annotation for a list of generic @@ -117,8 +122,7 @@ service ImageAnnotator { // `google.longrunning.Operations` interface. // `Operation.metadata` contains `OperationMetadata` (metadata). // `Operation.response` contains `AsyncBatchAnnotateFilesResponse` (results). 
- rpc AsyncBatchAnnotateFiles(AsyncBatchAnnotateFilesRequest) - returns (google.longrunning.Operation) { + rpc AsyncBatchAnnotateFiles(AsyncBatchAnnotateFilesRequest) returns (google.longrunning.Operation) { option (google.api.http) = { post: "/v1/files:asyncBatchAnnotate" body: "*" @@ -131,6 +135,11 @@ service ImageAnnotator { body: "*" } }; + option (google.api.method_signature) = "requests"; + option (google.longrunning.operation_info) = { + response_type: "AsyncBatchAnnotateFilesResponse" + metadata_type: "OperationMetadata" + }; } } @@ -198,28 +207,6 @@ message Feature { string model = 3; } -// A bucketized representation of likelihood, which is intended to give clients -// highly stable results across model upgrades. -enum Likelihood { - // Unknown likelihood. - UNKNOWN = 0; - - // It is very unlikely. - VERY_UNLIKELY = 1; - - // It is unlikely. - UNLIKELY = 2; - - // It is possible. - POSSIBLE = 3; - - // It is likely. - LIKELY = 4; - - // It is very likely. - VERY_LIKELY = 5; -} - // External image source (Google Cloud Storage or web URL image location). message ImageSource { // **Use `image_uri` instead.** @@ -263,6 +250,28 @@ message Image { ImageSource source = 2; } +// A bucketized representation of likelihood, which is intended to give clients +// highly stable results across model upgrades. +enum Likelihood { + // Unknown likelihood. + UNKNOWN = 0; + + // It is very unlikely. + VERY_UNLIKELY = 1; + + // It is unlikely. + UNLIKELY = 2; + + // It is possible. + POSSIBLE = 3; + + // It is likely. + LIKELY = 4; + + // It is very likely. + VERY_LIKELY = 5; +} + // A face annotation object contains the results of face detection. message FaceAnnotation { // A face-specific landmark (for example, a face feature). @@ -557,6 +566,30 @@ message SafeSearchAnnotation { // covered nudity, lewd or provocative poses, or close-ups of sensitive // body areas. Likelihood racy = 9; + + // Confidence of adult_score. Range [0, 1]. 
0 means not confident, 1 means + // very confident. + float adult_confidence = 16; + + // Confidence of spoof_score. Range [0, 1]. 0 means not confident, 1 means + // very confident. + float spoof_confidence = 18; + + // Confidence of medical_score. Range [0, 1]. 0 means not confident, 1 means + // very confident. + float medical_confidence = 20; + + // Confidence of violence_score. Range [0, 1]. 0 means not confident, 1 means + // very confident. + float violence_confidence = 22; + + // Confidence of racy_score. Range [0, 1]. 0 means not confident, 1 means very + // confident. + float racy_confidence = 24; + + // Confidence of nsfw_score. Range [0, 1]. 0 means not confident, 1 means very + // confident. + float nsfw_confidence = 26; } // Rectangle determined by min and max `LatLng` pairs. @@ -732,28 +765,10 @@ message AnnotateImageResponse { ImageAnnotationContext context = 21; } -// Response to a single file annotation request. A file may contain one or more -// images, which individually have their own responses. -message AnnotateFileResponse { - // Information about the file for which this response is generated. - InputConfig input_config = 1; - - // Individual responses to images found within the file. This field will be - // empty if the `error` field is set. - repeated AnnotateImageResponse responses = 2; - - // This field gives the total number of pages in the file. - int32 total_pages = 3; - - // If set, represents the error message for the failed request. The - // `responses` field will not be set in this case. - google.rpc.Status error = 4; -} - // Multiple image annotation requests are batched into a single service call. message BatchAnnotateImagesRequest { - // Individual image annotation requests for this batch. - repeated AnnotateImageRequest requests = 1; + // Required. Individual image annotation requests for this batch. + repeated AnnotateImageRequest requests = 1 [(google.api.field_behavior) = REQUIRED]; // Optional. 
Target project and location to make a call. // @@ -804,11 +819,29 @@ message AnnotateFileRequest { repeated int32 pages = 4; } +// Response to a single file annotation request. A file may contain one or more +// images, which individually have their own responses. +message AnnotateFileResponse { + // Information about the file for which this response is generated. + InputConfig input_config = 1; + + // Individual responses to images found within the file. This field will be + // empty if the `error` field is set. + repeated AnnotateImageResponse responses = 2; + + // This field gives the total number of pages in the file. + int32 total_pages = 3; + + // If set, represents the error message for the failed request. The + // `responses` field will not be set in this case. + google.rpc.Status error = 4; +} + // A list of requests to annotate files using the BatchAnnotateFiles API. message BatchAnnotateFilesRequest { - // The list of file annotation requests. Right now we support only one + // Required. The list of file annotation requests. Right now we support only one // AnnotateFileRequest in BatchAnnotateFilesRequest. - repeated AnnotateFileRequest requests = 1; + repeated AnnotateFileRequest requests = 1 [(google.api.field_behavior) = REQUIRED]; // Optional. Target project and location to make a call. // @@ -855,11 +888,11 @@ message AsyncAnnotateFileResponse { // Request for async image annotation for a list of images. message AsyncBatchAnnotateImagesRequest { - // Individual image annotation requests for this batch. - repeated AnnotateImageRequest requests = 1; + // Required. Individual image annotation requests for this batch. + repeated AnnotateImageRequest requests = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The desired output location and metadata (e.g. format). - OutputConfig output_config = 2; + OutputConfig output_config = 2 [(google.api.field_behavior) = REQUIRED]; // Optional. Target project and location to make a call. 
// @@ -885,8 +918,8 @@ message AsyncBatchAnnotateImagesResponse { // Multiple async file annotation requests are batched into a single service // call. message AsyncBatchAnnotateFilesRequest { - // Individual async file annotation requests for this batch. - repeated AsyncAnnotateFileRequest requests = 1; + // Required. Individual async file annotation requests for this batch. + repeated AsyncAnnotateFileRequest requests = 1 [(google.api.field_behavior) = REQUIRED]; // Optional. Target project and location to make a call. // diff --git a/vision/google/cloud/vision_v1/proto/image_annotator_pb2.py b/vision/google/cloud/vision_v1/proto/image_annotator_pb2.py index 1ea8f0ff05e7..0cc11b9ee577 100644 --- a/vision/google/cloud/vision_v1/proto/image_annotator_pb2.py +++ b/vision/google/cloud/vision_v1/proto/image_annotator_pb2.py @@ -18,6 +18,7 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.cloud.vision_v1.proto import ( geometry_pb2 as google_dot_cloud_dot_vision__v1_dot_proto_dot_geometry__pb2, ) @@ -48,11 +49,12 @@ "\n\032com.google.cloud.vision.v1B\023ImageAnnotatorProtoP\001Z\n\x10\x66\x64_bounding_poly\x18\x02 \x01(\x0b\x32$.google.cloud.vision.v1.BoundingPoly\x12\x42\n\tlandmarks\x18\x03 \x03(\x0b\x32/.google.cloud.vision.v1.FaceAnnotation.Landmark\x12\x12\n\nroll_angle\x18\x04 \x01(\x02\x12\x11\n\tpan_angle\x18\x05 \x01(\x02\x12\x12\n\ntilt_angle\x18\x06 \x01(\x02\x12\x1c\n\x14\x64\x65tection_confidence\x18\x07 \x01(\x02\x12\x1e\n\x16landmarking_confidence\x18\x08 \x01(\x02\x12:\n\x0ejoy_likelihood\x18\t \x01(\x0e\x32".google.cloud.vision.v1.Likelihood\x12=\n\x11sorrow_likelihood\x18\n \x01(\x0e\x32".google.cloud.vision.v1.Likelihood\x12<\n\x10\x61nger_likelihood\x18\x0b \x01(\x0e\x32".google.cloud.vision.v1.Likelihood\x12?\n\x13surprise_likelihood\x18\x0c 
\x01(\x0e\x32".google.cloud.vision.v1.Likelihood\x12\x44\n\x18under_exposed_likelihood\x18\r \x01(\x0e\x32".google.cloud.vision.v1.Likelihood\x12>\n\x12\x62lurred_likelihood\x18\x0e \x01(\x0e\x32".google.cloud.vision.v1.Likelihood\x12?\n\x13headwear_likelihood\x18\x0f \x01(\x0e\x32".google.cloud.vision.v1.Likelihood\x1a\xb9\x07\n\x08Landmark\x12\x42\n\x04type\x18\x03 \x01(\x0e\x32\x34.google.cloud.vision.v1.FaceAnnotation.Landmark.Type\x12\x32\n\x08position\x18\x04 \x01(\x0b\x32 .google.cloud.vision.v1.Position"\xb4\x06\n\x04Type\x12\x14\n\x10UNKNOWN_LANDMARK\x10\x00\x12\x0c\n\x08LEFT_EYE\x10\x01\x12\r\n\tRIGHT_EYE\x10\x02\x12\x18\n\x14LEFT_OF_LEFT_EYEBROW\x10\x03\x12\x19\n\x15RIGHT_OF_LEFT_EYEBROW\x10\x04\x12\x19\n\x15LEFT_OF_RIGHT_EYEBROW\x10\x05\x12\x1a\n\x16RIGHT_OF_RIGHT_EYEBROW\x10\x06\x12\x19\n\x15MIDPOINT_BETWEEN_EYES\x10\x07\x12\x0c\n\x08NOSE_TIP\x10\x08\x12\r\n\tUPPER_LIP\x10\t\x12\r\n\tLOWER_LIP\x10\n\x12\x0e\n\nMOUTH_LEFT\x10\x0b\x12\x0f\n\x0bMOUTH_RIGHT\x10\x0c\x12\x10\n\x0cMOUTH_CENTER\x10\r\x12\x15\n\x11NOSE_BOTTOM_RIGHT\x10\x0e\x12\x14\n\x10NOSE_BOTTOM_LEFT\x10\x0f\x12\x16\n\x12NOSE_BOTTOM_CENTER\x10\x10\x12\x19\n\x15LEFT_EYE_TOP_BOUNDARY\x10\x11\x12\x19\n\x15LEFT_EYE_RIGHT_CORNER\x10\x12\x12\x1c\n\x18LEFT_EYE_BOTTOM_BOUNDARY\x10\x13\x12\x18\n\x14LEFT_EYE_LEFT_CORNER\x10\x14\x12\x1a\n\x16RIGHT_EYE_TOP_BOUNDARY\x10\x15\x12\x1a\n\x16RIGHT_EYE_RIGHT_CORNER\x10\x16\x12\x1d\n\x19RIGHT_EYE_BOTTOM_BOUNDARY\x10\x17\x12\x19\n\x15RIGHT_EYE_LEFT_CORNER\x10\x18\x12\x1f\n\x1bLEFT_EYEBROW_UPPER_MIDPOINT\x10\x19\x12 \n\x1cRIGHT_EYEBROW_UPPER_MIDPOINT\x10\x1a\x12\x14\n\x10LEFT_EAR_TRAGION\x10\x1b\x12\x15\n\x11RIGHT_EAR_TRAGION\x10\x1c\x12\x12\n\x0eLEFT_EYE_PUPIL\x10\x1d\x12\x13\n\x0fRIGHT_EYE_PUPIL\x10\x1e\x12\x15\n\x11\x46OREHEAD_GLABELLA\x10\x1f\x12\x11\n\rCHIN_GNATHION\x10 \x12\x14\n\x10\x43HIN_LEFT_GONION\x10!\x12\x15\n\x11\x43HIN_RIGHT_GONION\x10""4\n\x0cLocationInfo\x12$\n\x07lat_lng\x18\x01 
\x01(\x0b\x32\x13.google.type.LatLng"=\n\x08Property\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t\x12\x14\n\x0cuint64_value\x18\x03 \x01(\x04"\xab\x02\n\x10\x45ntityAnnotation\x12\x0b\n\x03mid\x18\x01 \x01(\t\x12\x0e\n\x06locale\x18\x02 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12\r\n\x05score\x18\x04 \x01(\x02\x12\x16\n\nconfidence\x18\x05 \x01(\x02\x42\x02\x18\x01\x12\x12\n\ntopicality\x18\x06 \x01(\x02\x12;\n\rbounding_poly\x18\x07 \x01(\x0b\x32$.google.cloud.vision.v1.BoundingPoly\x12\x37\n\tlocations\x18\x08 \x03(\x0b\x32$.google.cloud.vision.v1.LocationInfo\x12\x34\n\nproperties\x18\t \x03(\x0b\x32 .google.cloud.vision.v1.Property"\x99\x01\n\x19LocalizedObjectAnnotation\x12\x0b\n\x03mid\x18\x01 \x01(\t\x12\x15\n\rlanguage_code\x18\x02 \x01(\t\x12\x0c\n\x04name\x18\x03 \x01(\t\x12\r\n\x05score\x18\x04 \x01(\x02\x12;\n\rbounding_poly\x18\x05 \x01(\x0b\x32$.google.cloud.vision.v1.BoundingPoly"\x99\x02\n\x14SafeSearchAnnotation\x12\x31\n\x05\x61\x64ult\x18\x01 \x01(\x0e\x32".google.cloud.vision.v1.Likelihood\x12\x31\n\x05spoof\x18\x02 \x01(\x0e\x32".google.cloud.vision.v1.Likelihood\x12\x33\n\x07medical\x18\x03 \x01(\x0e\x32".google.cloud.vision.v1.Likelihood\x12\x34\n\x08violence\x18\x04 \x01(\x0e\x32".google.cloud.vision.v1.Likelihood\x12\x30\n\x04racy\x18\t \x01(\x0e\x32".google.cloud.vision.v1.Likelihood"a\n\x0bLatLongRect\x12(\n\x0bmin_lat_lng\x18\x01 \x01(\x0b\x32\x13.google.type.LatLng\x12(\n\x0bmax_lat_lng\x18\x02 \x01(\x0b\x32\x13.google.type.LatLng"U\n\tColorInfo\x12!\n\x05\x63olor\x18\x01 \x01(\x0b\x32\x12.google.type.Color\x12\r\n\x05score\x18\x02 \x01(\x02\x12\x16\n\x0epixel_fraction\x18\x03 \x01(\x02"M\n\x18\x44ominantColorsAnnotation\x12\x31\n\x06\x63olors\x18\x01 \x03(\x0b\x32!.google.cloud.vision.v1.ColorInfo"\\\n\x0fImageProperties\x12I\n\x0f\x64ominant_colors\x18\x01 \x01(\x0b\x32\x30.google.cloud.vision.v1.DominantColorsAnnotation"x\n\x08\x43ropHint\x12;\n\rbounding_poly\x18\x01 
\x01(\x0b\x32$.google.cloud.vision.v1.BoundingPoly\x12\x12\n\nconfidence\x18\x02 \x01(\x02\x12\x1b\n\x13importance_fraction\x18\x03 \x01(\x02"K\n\x13\x43ropHintsAnnotation\x12\x34\n\ncrop_hints\x18\x01 \x03(\x0b\x32 .google.cloud.vision.v1.CropHint"(\n\x0f\x43ropHintsParams\x12\x15\n\raspect_ratios\x18\x01 \x03(\x02"1\n\x12WebDetectionParams\x12\x1b\n\x13include_geo_results\x18\x02 \x01(\x08"\xbc\x02\n\x0cImageContext\x12:\n\rlat_long_rect\x18\x01 \x01(\x0b\x32#.google.cloud.vision.v1.LatLongRect\x12\x16\n\x0elanguage_hints\x18\x02 \x03(\t\x12\x42\n\x11\x63rop_hints_params\x18\x04 \x01(\x0b\x32\'.google.cloud.vision.v1.CropHintsParams\x12J\n\x15product_search_params\x18\x05 \x01(\x0b\x32+.google.cloud.vision.v1.ProductSearchParams\x12H\n\x14web_detection_params\x18\x06 \x01(\x0b\x32*.google.cloud.vision.v1.WebDetectionParams"\xb4\x01\n\x14\x41nnotateImageRequest\x12,\n\x05image\x18\x01 \x01(\x0b\x32\x1d.google.cloud.vision.v1.Image\x12\x31\n\x08\x66\x65\x61tures\x18\x02 \x03(\x0b\x32\x1f.google.cloud.vision.v1.Feature\x12;\n\rimage_context\x18\x03 \x01(\x0b\x32$.google.cloud.vision.v1.ImageContext":\n\x16ImageAnnotationContext\x12\x0b\n\x03uri\x18\x01 \x01(\t\x12\x13\n\x0bpage_number\x18\x02 \x01(\x05"\xe4\x07\n\x15\x41nnotateImageResponse\x12@\n\x10\x66\x61\x63\x65_annotations\x18\x01 \x03(\x0b\x32&.google.cloud.vision.v1.FaceAnnotation\x12\x46\n\x14landmark_annotations\x18\x02 \x03(\x0b\x32(.google.cloud.vision.v1.EntityAnnotation\x12\x42\n\x10logo_annotations\x18\x03 \x03(\x0b\x32(.google.cloud.vision.v1.EntityAnnotation\x12\x43\n\x11label_annotations\x18\x04 \x03(\x0b\x32(.google.cloud.vision.v1.EntityAnnotation\x12W\n\x1clocalized_object_annotations\x18\x16 \x03(\x0b\x32\x31.google.cloud.vision.v1.LocalizedObjectAnnotation\x12\x42\n\x10text_annotations\x18\x05 \x03(\x0b\x32(.google.cloud.vision.v1.EntityAnnotation\x12\x44\n\x14\x66ull_text_annotation\x18\x0c \x01(\x0b\x32&.google.cloud.vision.v1.TextAnnotation\x12L\n\x16safe_search_annotation\x18\x06 
\x01(\x0b\x32,.google.cloud.vision.v1.SafeSearchAnnotation\x12L\n\x1bimage_properties_annotation\x18\x08 \x01(\x0b\x32\'.google.cloud.vision.v1.ImageProperties\x12J\n\x15\x63rop_hints_annotation\x18\x0b \x01(\x0b\x32+.google.cloud.vision.v1.CropHintsAnnotation\x12;\n\rweb_detection\x18\r \x01(\x0b\x32$.google.cloud.vision.v1.WebDetection\x12L\n\x16product_search_results\x18\x0e \x01(\x0b\x32,.google.cloud.vision.v1.ProductSearchResults\x12!\n\x05\x65rror\x18\t \x01(\x0b\x32\x12.google.rpc.Status\x12?\n\x07\x63ontext\x18\x15 \x01(\x0b\x32..google.cloud.vision.v1.ImageAnnotationContext"\xcb\x01\n\x14\x41nnotateFileResponse\x12\x39\n\x0cinput_config\x18\x01 \x01(\x0b\x32#.google.cloud.vision.v1.InputConfig\x12@\n\tresponses\x18\x02 \x03(\x0b\x32-.google.cloud.vision.v1.AnnotateImageResponse\x12\x13\n\x0btotal_pages\x18\x03 \x01(\x05\x12!\n\x05\x65rror\x18\x04 \x01(\x0b\x32\x12.google.rpc.Status"l\n\x1a\x42\x61tchAnnotateImagesRequest\x12>\n\x08requests\x18\x01 \x03(\x0b\x32,.google.cloud.vision.v1.AnnotateImageRequest\x12\x0e\n\x06parent\x18\x04 \x01(\t"_\n\x1b\x42\x61tchAnnotateImagesResponse\x12@\n\tresponses\x18\x01 \x03(\x0b\x32-.google.cloud.vision.v1.AnnotateImageResponse"\xcf\x01\n\x13\x41nnotateFileRequest\x12\x39\n\x0cinput_config\x18\x01 \x01(\x0b\x32#.google.cloud.vision.v1.InputConfig\x12\x31\n\x08\x66\x65\x61tures\x18\x02 \x03(\x0b\x32\x1f.google.cloud.vision.v1.Feature\x12;\n\rimage_context\x18\x03 \x01(\x0b\x32$.google.cloud.vision.v1.ImageContext\x12\r\n\x05pages\x18\x04 \x03(\x05"j\n\x19\x42\x61tchAnnotateFilesRequest\x12=\n\x08requests\x18\x01 \x03(\x0b\x32+.google.cloud.vision.v1.AnnotateFileRequest\x12\x0e\n\x06parent\x18\x03 \x01(\t"]\n\x1a\x42\x61tchAnnotateFilesResponse\x12?\n\tresponses\x18\x01 \x03(\x0b\x32,.google.cloud.vision.v1.AnnotateFileResponse"\x82\x02\n\x18\x41syncAnnotateFileRequest\x12\x39\n\x0cinput_config\x18\x01 \x01(\x0b\x32#.google.cloud.vision.v1.InputConfig\x12\x31\n\x08\x66\x65\x61tures\x18\x02 
\x03(\x0b\x32\x1f.google.cloud.vision.v1.Feature\x12;\n\rimage_context\x18\x03 \x01(\x0b\x32$.google.cloud.vision.v1.ImageContext\x12;\n\routput_config\x18\x04 \x01(\x0b\x32$.google.cloud.vision.v1.OutputConfig"X\n\x19\x41syncAnnotateFileResponse\x12;\n\routput_config\x18\x01 \x01(\x0b\x32$.google.cloud.vision.v1.OutputConfig"\xae\x01\n\x1f\x41syncBatchAnnotateImagesRequest\x12>\n\x08requests\x18\x01 \x03(\x0b\x32,.google.cloud.vision.v1.AnnotateImageRequest\x12;\n\routput_config\x18\x02 \x01(\x0b\x32$.google.cloud.vision.v1.OutputConfig\x12\x0e\n\x06parent\x18\x04 \x01(\t"_\n AsyncBatchAnnotateImagesResponse\x12;\n\routput_config\x18\x01 \x01(\x0b\x32$.google.cloud.vision.v1.OutputConfig"t\n\x1e\x41syncBatchAnnotateFilesRequest\x12\x42\n\x08requests\x18\x01 \x03(\x0b\x32\x30.google.cloud.vision.v1.AsyncAnnotateFileRequest\x12\x0e\n\x06parent\x18\x04 \x01(\t"g\n\x1f\x41syncBatchAnnotateFilesResponse\x12\x44\n\tresponses\x18\x01 \x03(\x0b\x32\x31.google.cloud.vision.v1.AsyncAnnotateFileResponse"h\n\x0bInputConfig\x12\x35\n\ngcs_source\x18\x01 \x01(\x0b\x32!.google.cloud.vision.v1.GcsSource\x12\x0f\n\x07\x63ontent\x18\x03 \x01(\x0c\x12\x11\n\tmime_type\x18\x02 \x01(\t"c\n\x0cOutputConfig\x12?\n\x0fgcs_destination\x18\x01 \x01(\x0b\x32&.google.cloud.vision.v1.GcsDestination\x12\x12\n\nbatch_size\x18\x02 \x01(\x05"\x18\n\tGcsSource\x12\x0b\n\x03uri\x18\x01 \x01(\t"\x1d\n\x0eGcsDestination\x12\x0b\n\x03uri\x18\x01 \x01(\t"\x88\x02\n\x11OperationMetadata\x12>\n\x05state\x18\x01 \x01(\x0e\x32/.google.cloud.vision.v1.OperationMetadata.State\x12/\n\x0b\x63reate_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x06 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp"Q\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07\x43REATED\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\x12\x08\n\x04\x44ONE\x10\x03\x12\r\n\tCANCELLED\x10\x04*e\n\nLikelihood\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x11\n\rVERY_UNLIKELY\x10\x01\x12\x0c\n\x08UNLIKELY\x10\x02\x12\x0c\n\x08POSSIBLE\x10\x03\x12\n\n\x06LIKELY\x10\x04\x12\x0f\n\x0bVERY_LIKELY\x10\x05\x32\xcc\t\n\x0eImageAnnotator\x12\x88\x02\n\x13\x42\x61tchAnnotateImages\x12\x32.google.cloud.vision.v1.BatchAnnotateImagesRequest\x1a\x33.google.cloud.vision.v1.BatchAnnotateImagesResponse"\x87\x01\x82\xd3\xe4\x93\x02\x80\x01"\x13/v1/images:annotate:\x01*Z8"3/v1/{parent=projects/*/locations/*}/images:annotate:\x01*Z,"\'/v1/{parent=projects/*}/images:annotate:\x01*\x12\x81\x02\n\x12\x42\x61tchAnnotateFiles\x12\x31.google.cloud.vision.v1.BatchAnnotateFilesRequest\x1a\x32.google.cloud.vision.v1.BatchAnnotateFilesResponse"\x83\x01\x82\xd3\xe4\x93\x02}"\x12/v1/files:annotate:\x01*Z7"2/v1/{parent=projects/*/locations/*}/files:annotate:\x01*Z+"&/v1/{parent=projects/*}/files:annotate:\x01*\x12\x9a\x02\n\x18\x41syncBatchAnnotateImages\x12\x37.google.cloud.vision.v1.AsyncBatchAnnotateImagesRequest\x1a\x1d.google.longrunning.Operation"\xa5\x01\x82\xd3\xe4\x93\x02\x9e\x01"\x1d/v1/images:asyncBatchAnnotate:\x01*ZB"=/v1/{parent=projects/*/locations/*}/images:asyncBatchAnnotate:\x01*Z6"1/v1/{parent=projects/*}/images:asyncBatchAnnotate:\x01*\x12\x95\x02\n\x17\x41syncBatchAnnotateFiles\x12\x36.google.cloud.vision.v1.AsyncBatchAnnotateFilesRequest\x1a\x1d.google.longrunning.Operation"\xa2\x01\x82\xd3\xe4\x93\x02\x9b\x01"\x1c/v1/files:asyncBatchAnnotate:\x01*ZA"\n\x10\x66\x64_bounding_poly\x18\x02 \x01(\x0b\x32$.google.cloud.vision.v1.BoundingPoly\x12\x42\n\tlandmarks\x18\x03 \x03(\x0b\x32/.google.cloud.vision.v1.FaceAnnotation.Landmark\x12\x12\n\nroll_angle\x18\x04 \x01(\x02\x12\x11\n\tpan_angle\x18\x05 \x01(\x02\x12\x12\n\ntilt_angle\x18\x06 
\x01(\x02\x12\x1c\n\x14\x64\x65tection_confidence\x18\x07 \x01(\x02\x12\x1e\n\x16landmarking_confidence\x18\x08 \x01(\x02\x12:\n\x0ejoy_likelihood\x18\t \x01(\x0e\x32".google.cloud.vision.v1.Likelihood\x12=\n\x11sorrow_likelihood\x18\n \x01(\x0e\x32".google.cloud.vision.v1.Likelihood\x12<\n\x10\x61nger_likelihood\x18\x0b \x01(\x0e\x32".google.cloud.vision.v1.Likelihood\x12?\n\x13surprise_likelihood\x18\x0c \x01(\x0e\x32".google.cloud.vision.v1.Likelihood\x12\x44\n\x18under_exposed_likelihood\x18\r \x01(\x0e\x32".google.cloud.vision.v1.Likelihood\x12>\n\x12\x62lurred_likelihood\x18\x0e \x01(\x0e\x32".google.cloud.vision.v1.Likelihood\x12?\n\x13headwear_likelihood\x18\x0f \x01(\x0e\x32".google.cloud.vision.v1.Likelihood\x1a\xb9\x07\n\x08Landmark\x12\x42\n\x04type\x18\x03 \x01(\x0e\x32\x34.google.cloud.vision.v1.FaceAnnotation.Landmark.Type\x12\x32\n\x08position\x18\x04 \x01(\x0b\x32 .google.cloud.vision.v1.Position"\xb4\x06\n\x04Type\x12\x14\n\x10UNKNOWN_LANDMARK\x10\x00\x12\x0c\n\x08LEFT_EYE\x10\x01\x12\r\n\tRIGHT_EYE\x10\x02\x12\x18\n\x14LEFT_OF_LEFT_EYEBROW\x10\x03\x12\x19\n\x15RIGHT_OF_LEFT_EYEBROW\x10\x04\x12\x19\n\x15LEFT_OF_RIGHT_EYEBROW\x10\x05\x12\x1a\n\x16RIGHT_OF_RIGHT_EYEBROW\x10\x06\x12\x19\n\x15MIDPOINT_BETWEEN_EYES\x10\x07\x12\x0c\n\x08NOSE_TIP\x10\x08\x12\r\n\tUPPER_LIP\x10\t\x12\r\n\tLOWER_LIP\x10\n\x12\x0e\n\nMOUTH_LEFT\x10\x0b\x12\x0f\n\x0bMOUTH_RIGHT\x10\x0c\x12\x10\n\x0cMOUTH_CENTER\x10\r\x12\x15\n\x11NOSE_BOTTOM_RIGHT\x10\x0e\x12\x14\n\x10NOSE_BOTTOM_LEFT\x10\x0f\x12\x16\n\x12NOSE_BOTTOM_CENTER\x10\x10\x12\x19\n\x15LEFT_EYE_TOP_BOUNDARY\x10\x11\x12\x19\n\x15LEFT_EYE_RIGHT_CORNER\x10\x12\x12\x1c\n\x18LEFT_EYE_BOTTOM_BOUNDARY\x10\x13\x12\x18\n\x14LEFT_EYE_LEFT_CORNER\x10\x14\x12\x1a\n\x16RIGHT_EYE_TOP_BOUNDARY\x10\x15\x12\x1a\n\x16RIGHT_EYE_RIGHT_CORNER\x10\x16\x12\x1d\n\x19RIGHT_EYE_BOTTOM_BOUNDARY\x10\x17\x12\x19\n\x15RIGHT_EYE_LEFT_CORNER\x10\x18\x12\x1f\n\x1bLEFT_EYEBROW_UPPER_MIDPOINT\x10\x19\x12 
\n\x1cRIGHT_EYEBROW_UPPER_MIDPOINT\x10\x1a\x12\x14\n\x10LEFT_EAR_TRAGION\x10\x1b\x12\x15\n\x11RIGHT_EAR_TRAGION\x10\x1c\x12\x12\n\x0eLEFT_EYE_PUPIL\x10\x1d\x12\x13\n\x0fRIGHT_EYE_PUPIL\x10\x1e\x12\x15\n\x11\x46OREHEAD_GLABELLA\x10\x1f\x12\x11\n\rCHIN_GNATHION\x10 \x12\x14\n\x10\x43HIN_LEFT_GONION\x10!\x12\x15\n\x11\x43HIN_RIGHT_GONION\x10""4\n\x0cLocationInfo\x12$\n\x07lat_lng\x18\x01 \x01(\x0b\x32\x13.google.type.LatLng"=\n\x08Property\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t\x12\x14\n\x0cuint64_value\x18\x03 \x01(\x04"\xab\x02\n\x10\x45ntityAnnotation\x12\x0b\n\x03mid\x18\x01 \x01(\t\x12\x0e\n\x06locale\x18\x02 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12\r\n\x05score\x18\x04 \x01(\x02\x12\x16\n\nconfidence\x18\x05 \x01(\x02\x42\x02\x18\x01\x12\x12\n\ntopicality\x18\x06 \x01(\x02\x12;\n\rbounding_poly\x18\x07 \x01(\x0b\x32$.google.cloud.vision.v1.BoundingPoly\x12\x37\n\tlocations\x18\x08 \x03(\x0b\x32$.google.cloud.vision.v1.LocationInfo\x12\x34\n\nproperties\x18\t \x03(\x0b\x32 .google.cloud.vision.v1.Property"\x99\x01\n\x19LocalizedObjectAnnotation\x12\x0b\n\x03mid\x18\x01 \x01(\t\x12\x15\n\rlanguage_code\x18\x02 \x01(\t\x12\x0c\n\x04name\x18\x03 \x01(\t\x12\r\n\x05score\x18\x04 \x01(\x02\x12;\n\rbounding_poly\x18\x05 \x01(\x0b\x32$.google.cloud.vision.v1.BoundingPoly"\xb8\x03\n\x14SafeSearchAnnotation\x12\x31\n\x05\x61\x64ult\x18\x01 \x01(\x0e\x32".google.cloud.vision.v1.Likelihood\x12\x31\n\x05spoof\x18\x02 \x01(\x0e\x32".google.cloud.vision.v1.Likelihood\x12\x33\n\x07medical\x18\x03 \x01(\x0e\x32".google.cloud.vision.v1.Likelihood\x12\x34\n\x08violence\x18\x04 \x01(\x0e\x32".google.cloud.vision.v1.Likelihood\x12\x30\n\x04racy\x18\t \x01(\x0e\x32".google.cloud.vision.v1.Likelihood\x12\x18\n\x10\x61\x64ult_confidence\x18\x10 \x01(\x02\x12\x18\n\x10spoof_confidence\x18\x12 \x01(\x02\x12\x1a\n\x12medical_confidence\x18\x14 \x01(\x02\x12\x1b\n\x13violence_confidence\x18\x16 \x01(\x02\x12\x17\n\x0fracy_confidence\x18\x18 
\x01(\x02\x12\x17\n\x0fnsfw_confidence\x18\x1a \x01(\x02"a\n\x0bLatLongRect\x12(\n\x0bmin_lat_lng\x18\x01 \x01(\x0b\x32\x13.google.type.LatLng\x12(\n\x0bmax_lat_lng\x18\x02 \x01(\x0b\x32\x13.google.type.LatLng"U\n\tColorInfo\x12!\n\x05\x63olor\x18\x01 \x01(\x0b\x32\x12.google.type.Color\x12\r\n\x05score\x18\x02 \x01(\x02\x12\x16\n\x0epixel_fraction\x18\x03 \x01(\x02"M\n\x18\x44ominantColorsAnnotation\x12\x31\n\x06\x63olors\x18\x01 \x03(\x0b\x32!.google.cloud.vision.v1.ColorInfo"\\\n\x0fImageProperties\x12I\n\x0f\x64ominant_colors\x18\x01 \x01(\x0b\x32\x30.google.cloud.vision.v1.DominantColorsAnnotation"x\n\x08\x43ropHint\x12;\n\rbounding_poly\x18\x01 \x01(\x0b\x32$.google.cloud.vision.v1.BoundingPoly\x12\x12\n\nconfidence\x18\x02 \x01(\x02\x12\x1b\n\x13importance_fraction\x18\x03 \x01(\x02"K\n\x13\x43ropHintsAnnotation\x12\x34\n\ncrop_hints\x18\x01 \x03(\x0b\x32 .google.cloud.vision.v1.CropHint"(\n\x0f\x43ropHintsParams\x12\x15\n\raspect_ratios\x18\x01 \x03(\x02"1\n\x12WebDetectionParams\x12\x1b\n\x13include_geo_results\x18\x02 \x01(\x08"\xbc\x02\n\x0cImageContext\x12:\n\rlat_long_rect\x18\x01 \x01(\x0b\x32#.google.cloud.vision.v1.LatLongRect\x12\x16\n\x0elanguage_hints\x18\x02 \x03(\t\x12\x42\n\x11\x63rop_hints_params\x18\x04 \x01(\x0b\x32\'.google.cloud.vision.v1.CropHintsParams\x12J\n\x15product_search_params\x18\x05 \x01(\x0b\x32+.google.cloud.vision.v1.ProductSearchParams\x12H\n\x14web_detection_params\x18\x06 \x01(\x0b\x32*.google.cloud.vision.v1.WebDetectionParams"\xb4\x01\n\x14\x41nnotateImageRequest\x12,\n\x05image\x18\x01 \x01(\x0b\x32\x1d.google.cloud.vision.v1.Image\x12\x31\n\x08\x66\x65\x61tures\x18\x02 \x03(\x0b\x32\x1f.google.cloud.vision.v1.Feature\x12;\n\rimage_context\x18\x03 \x01(\x0b\x32$.google.cloud.vision.v1.ImageContext":\n\x16ImageAnnotationContext\x12\x0b\n\x03uri\x18\x01 \x01(\t\x12\x13\n\x0bpage_number\x18\x02 \x01(\x05"\xe4\x07\n\x15\x41nnotateImageResponse\x12@\n\x10\x66\x61\x63\x65_annotations\x18\x01 
\x03(\x0b\x32&.google.cloud.vision.v1.FaceAnnotation\x12\x46\n\x14landmark_annotations\x18\x02 \x03(\x0b\x32(.google.cloud.vision.v1.EntityAnnotation\x12\x42\n\x10logo_annotations\x18\x03 \x03(\x0b\x32(.google.cloud.vision.v1.EntityAnnotation\x12\x43\n\x11label_annotations\x18\x04 \x03(\x0b\x32(.google.cloud.vision.v1.EntityAnnotation\x12W\n\x1clocalized_object_annotations\x18\x16 \x03(\x0b\x32\x31.google.cloud.vision.v1.LocalizedObjectAnnotation\x12\x42\n\x10text_annotations\x18\x05 \x03(\x0b\x32(.google.cloud.vision.v1.EntityAnnotation\x12\x44\n\x14\x66ull_text_annotation\x18\x0c \x01(\x0b\x32&.google.cloud.vision.v1.TextAnnotation\x12L\n\x16safe_search_annotation\x18\x06 \x01(\x0b\x32,.google.cloud.vision.v1.SafeSearchAnnotation\x12L\n\x1bimage_properties_annotation\x18\x08 \x01(\x0b\x32\'.google.cloud.vision.v1.ImageProperties\x12J\n\x15\x63rop_hints_annotation\x18\x0b \x01(\x0b\x32+.google.cloud.vision.v1.CropHintsAnnotation\x12;\n\rweb_detection\x18\r \x01(\x0b\x32$.google.cloud.vision.v1.WebDetection\x12L\n\x16product_search_results\x18\x0e \x01(\x0b\x32,.google.cloud.vision.v1.ProductSearchResults\x12!\n\x05\x65rror\x18\t \x01(\x0b\x32\x12.google.rpc.Status\x12?\n\x07\x63ontext\x18\x15 \x01(\x0b\x32..google.cloud.vision.v1.ImageAnnotationContext"q\n\x1a\x42\x61tchAnnotateImagesRequest\x12\x43\n\x08requests\x18\x01 \x03(\x0b\x32,.google.cloud.vision.v1.AnnotateImageRequestB\x03\xe0\x41\x02\x12\x0e\n\x06parent\x18\x04 \x01(\t"_\n\x1b\x42\x61tchAnnotateImagesResponse\x12@\n\tresponses\x18\x01 \x03(\x0b\x32-.google.cloud.vision.v1.AnnotateImageResponse"\xcf\x01\n\x13\x41nnotateFileRequest\x12\x39\n\x0cinput_config\x18\x01 \x01(\x0b\x32#.google.cloud.vision.v1.InputConfig\x12\x31\n\x08\x66\x65\x61tures\x18\x02 \x03(\x0b\x32\x1f.google.cloud.vision.v1.Feature\x12;\n\rimage_context\x18\x03 \x01(\x0b\x32$.google.cloud.vision.v1.ImageContext\x12\r\n\x05pages\x18\x04 \x03(\x05"\xcb\x01\n\x14\x41nnotateFileResponse\x12\x39\n\x0cinput_config\x18\x01 
\x01(\x0b\x32#.google.cloud.vision.v1.InputConfig\x12@\n\tresponses\x18\x02 \x03(\x0b\x32-.google.cloud.vision.v1.AnnotateImageResponse\x12\x13\n\x0btotal_pages\x18\x03 \x01(\x05\x12!\n\x05\x65rror\x18\x04 \x01(\x0b\x32\x12.google.rpc.Status"o\n\x19\x42\x61tchAnnotateFilesRequest\x12\x42\n\x08requests\x18\x01 \x03(\x0b\x32+.google.cloud.vision.v1.AnnotateFileRequestB\x03\xe0\x41\x02\x12\x0e\n\x06parent\x18\x03 \x01(\t"]\n\x1a\x42\x61tchAnnotateFilesResponse\x12?\n\tresponses\x18\x01 \x03(\x0b\x32,.google.cloud.vision.v1.AnnotateFileResponse"\x82\x02\n\x18\x41syncAnnotateFileRequest\x12\x39\n\x0cinput_config\x18\x01 \x01(\x0b\x32#.google.cloud.vision.v1.InputConfig\x12\x31\n\x08\x66\x65\x61tures\x18\x02 \x03(\x0b\x32\x1f.google.cloud.vision.v1.Feature\x12;\n\rimage_context\x18\x03 \x01(\x0b\x32$.google.cloud.vision.v1.ImageContext\x12;\n\routput_config\x18\x04 \x01(\x0b\x32$.google.cloud.vision.v1.OutputConfig"X\n\x19\x41syncAnnotateFileResponse\x12;\n\routput_config\x18\x01 \x01(\x0b\x32$.google.cloud.vision.v1.OutputConfig"\xb8\x01\n\x1f\x41syncBatchAnnotateImagesRequest\x12\x43\n\x08requests\x18\x01 \x03(\x0b\x32,.google.cloud.vision.v1.AnnotateImageRequestB\x03\xe0\x41\x02\x12@\n\routput_config\x18\x02 \x01(\x0b\x32$.google.cloud.vision.v1.OutputConfigB\x03\xe0\x41\x02\x12\x0e\n\x06parent\x18\x04 \x01(\t"_\n AsyncBatchAnnotateImagesResponse\x12;\n\routput_config\x18\x01 \x01(\x0b\x32$.google.cloud.vision.v1.OutputConfig"y\n\x1e\x41syncBatchAnnotateFilesRequest\x12G\n\x08requests\x18\x01 \x03(\x0b\x32\x30.google.cloud.vision.v1.AsyncAnnotateFileRequestB\x03\xe0\x41\x02\x12\x0e\n\x06parent\x18\x04 \x01(\t"g\n\x1f\x41syncBatchAnnotateFilesResponse\x12\x44\n\tresponses\x18\x01 \x03(\x0b\x32\x31.google.cloud.vision.v1.AsyncAnnotateFileResponse"h\n\x0bInputConfig\x12\x35\n\ngcs_source\x18\x01 \x01(\x0b\x32!.google.cloud.vision.v1.GcsSource\x12\x0f\n\x07\x63ontent\x18\x03 \x01(\x0c\x12\x11\n\tmime_type\x18\x02 
\x01(\t"c\n\x0cOutputConfig\x12?\n\x0fgcs_destination\x18\x01 \x01(\x0b\x32&.google.cloud.vision.v1.GcsDestination\x12\x12\n\nbatch_size\x18\x02 \x01(\x05"\x18\n\tGcsSource\x12\x0b\n\x03uri\x18\x01 \x01(\t"\x1d\n\x0eGcsDestination\x12\x0b\n\x03uri\x18\x01 \x01(\t"\x88\x02\n\x11OperationMetadata\x12>\n\x05state\x18\x01 \x01(\x0e\x32/.google.cloud.vision.v1.OperationMetadata.State\x12/\n\x0b\x63reate_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"Q\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07\x43REATED\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\x12\x08\n\x04\x44ONE\x10\x03\x12\r\n\tCANCELLED\x10\x04*e\n\nLikelihood\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x11\n\rVERY_UNLIKELY\x10\x01\x12\x0c\n\x08UNLIKELY\x10\x02\x12\x0c\n\x08POSSIBLE\x10\x03\x12\n\n\x06LIKELY\x10\x04\x12\x0f\n\x0bVERY_LIKELY\x10\x05\x32\xf5\n\n\x0eImageAnnotator\x12\x93\x02\n\x13\x42\x61tchAnnotateImages\x12\x32.google.cloud.vision.v1.BatchAnnotateImagesRequest\x1a\x33.google.cloud.vision.v1.BatchAnnotateImagesResponse"\x92\x01\x82\xd3\xe4\x93\x02\x80\x01"\x13/v1/images:annotate:\x01*Z8"3/v1/{parent=projects/*/locations/*}/images:annotate:\x01*Z,"\'/v1/{parent=projects/*}/images:annotate:\x01*\xda\x41\x08requests\x12\x8c\x02\n\x12\x42\x61tchAnnotateFiles\x12\x31.google.cloud.vision.v1.BatchAnnotateFilesRequest\x1a\x32.google.cloud.vision.v1.BatchAnnotateFilesResponse"\x8e\x01\x82\xd3\xe4\x93\x02}"\x12/v1/files:annotate:\x01*Z7"2/v1/{parent=projects/*/locations/*}/files:annotate:\x01*Z+"&/v1/{parent=projects/*}/files:annotate:\x01*\xda\x41\x08requests\x12\xeb\x02\n\x18\x41syncBatchAnnotateImages\x12\x37.google.cloud.vision.v1.AsyncBatchAnnotateImagesRequest\x1a\x1d.google.longrunning.Operation"\xf6\x01\x82\xd3\xe4\x93\x02\x9e\x01"\x1d/v1/images:asyncBatchAnnotate:\x01*ZB"=/v1/{parent=projects/*/locations/*}/images:asyncBatchAnnotate:\x01*Z6"1/v1/{parent=projects/*}/images:asyncBatchAnnotate:\x01*\xda\x41\x16reques
ts,output_config\xca\x41\x35\n AsyncBatchAnnotateImagesResponse\x12\x11OperationMetadata\x12\xd7\x02\n\x17\x41syncBatchAnnotateFiles\x12\x36.google.cloud.vision.v1.AsyncBatchAnnotateFilesRequest\x1a\x1d.google.longrunning.Operation"\xe4\x01\x82\xd3\xe4\x93\x02\x9b\x01"\x1c/v1/files:asyncBatchAnnotate:\x01*ZA"\n\x1d\x41\x64\x64ProductToProductSetRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07product\x18\x02 \x01(\t"C\n"RemoveProductFromProductSetRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07product\x18\x02 \x01(\t"V\n\x1fListProductsInProductSetRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"n\n ListProductsInProductSetResponse\x12\x31\n\x08products\x18\x01 \x03(\x0b\x32\x1f.google.cloud.vision.v1.Product\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"2\n\x1aImportProductSetsGcsSource\x12\x14\n\x0c\x63sv_file_uri\x18\x01 \x01(\t"r\n\x1cImportProductSetsInputConfig\x12H\n\ngcs_source\x18\x01 \x01(\x0b\x32\x32.google.cloud.vision.v1.ImportProductSetsGcsSourceH\x00\x42\x08\n\x06source"v\n\x18ImportProductSetsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12J\n\x0cinput_config\x18\x02 \x01(\x0b\x32\x34.google.cloud.vision.v1.ImportProductSetsInputConfig"\x83\x01\n\x19ImportProductSetsResponse\x12@\n\x10reference_images\x18\x01 \x03(\x0b\x32&.google.cloud.vision.v1.ReferenceImage\x12$\n\x08statuses\x18\x02 \x03(\x0b\x32\x12.google.rpc.Status"\x97\x02\n\x16\x42\x61tchOperationMetadata\x12\x43\n\x05state\x18\x01 \x01(\x0e\x32\x34.google.cloud.vision.v1.BatchOperationMetadata.State\x12/\n\x0bsubmit_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"Y\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0e\n\nPROCESSING\x10\x01\x12\x0e\n\nSUCCESSFUL\x10\x02\x12\n\n\x06\x46\x41ILED\x10\x03\x12\r\n\tCANCELLED\x10\x04"/\n\x15ProductSetPurgeConfig\x12\x16\n\x0eproduct_set_id\x18\x01 
\x01(\t"\xb4\x01\n\x14PurgeProductsRequest\x12Q\n\x18product_set_purge_config\x18\x02 \x01(\x0b\x32-.google.cloud.vision.v1.ProductSetPurgeConfigH\x00\x12 \n\x16\x64\x65lete_orphan_products\x18\x03 \x01(\x08H\x00\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\r\n\x05\x66orce\x18\x04 \x01(\x08\x42\x08\n\x06target2\xe5\x1a\n\rProductSearch\x12\xad\x01\n\x10\x43reateProductSet\x12/.google.cloud.vision.v1.CreateProductSetRequest\x1a".google.cloud.vision.v1.ProductSet"D\x82\xd3\xe4\x93\x02>"//v1/{parent=projects/*/locations/*}/productSets:\x0bproduct_set\x12\xab\x01\n\x0fListProductSets\x12..google.cloud.vision.v1.ListProductSetsRequest\x1a/.google.cloud.vision.v1.ListProductSetsResponse"7\x82\xd3\xe4\x93\x02\x31\x12//v1/{parent=projects/*/locations/*}/productSets\x12\x9a\x01\n\rGetProductSet\x12,.google.cloud.vision.v1.GetProductSetRequest\x1a".google.cloud.vision.v1.ProductSet"7\x82\xd3\xe4\x93\x02\x31\x12//v1/{name=projects/*/locations/*/productSets/*}\x12\xb9\x01\n\x10UpdateProductSet\x12/.google.cloud.vision.v1.UpdateProductSetRequest\x1a".google.cloud.vision.v1.ProductSet"P\x82\xd3\xe4\x93\x02J2;/v1/{product_set.name=projects/*/locations/*/productSets/*}:\x0bproduct_set\x12\x94\x01\n\x10\x44\x65leteProductSet\x12/.google.cloud.vision.v1.DeleteProductSetRequest\x1a\x16.google.protobuf.Empty"7\x82\xd3\xe4\x93\x02\x31*//v1/{name=projects/*/locations/*/productSets/*}\x12\x9d\x01\n\rCreateProduct\x12,.google.cloud.vision.v1.CreateProductRequest\x1a\x1f.google.cloud.vision.v1.Product"=\x82\xd3\xe4\x93\x02\x37",/v1/{parent=projects/*/locations/*}/products:\x07product\x12\x9f\x01\n\x0cListProducts\x12+.google.cloud.vision.v1.ListProductsRequest\x1a,.google.cloud.vision.v1.ListProductsResponse"4\x82\xd3\xe4\x93\x02.\x12,/v1/{parent=projects/*/locations/*}/products\x12\x8e\x01\n\nGetProduct\x12).google.cloud.vision.v1.GetProductRequest\x1a\x1f.google.cloud.vision.v1.Product"4\x82\xd3\xe4\x93\x02.\x12,/v1/{name=projects/*/locations/*/products/*}\x12\xa5\x01\n\rUpdateProduct\x12,.goo
gle.cloud.vision.v1.UpdateProductRequest\x1a\x1f.google.cloud.vision.v1.Product"E\x82\xd3\xe4\x93\x02?24/v1/{product.name=projects/*/locations/*/products/*}:\x07product\x12\x8b\x01\n\rDeleteProduct\x12,.google.cloud.vision.v1.DeleteProductRequest\x1a\x16.google.protobuf.Empty"4\x82\xd3\xe4\x93\x02.*,/v1/{name=projects/*/locations/*/products/*}\x12\xcc\x01\n\x14\x43reateReferenceImage\x12\x33.google.cloud.vision.v1.CreateReferenceImageRequest\x1a&.google.cloud.vision.v1.ReferenceImage"W\x82\xd3\xe4\x93\x02Q">/v1/{parent=projects/*/locations/*/products/*}/referenceImages:\x0freference_image\x12\xab\x01\n\x14\x44\x65leteReferenceImage\x12\x33.google.cloud.vision.v1.DeleteReferenceImageRequest\x1a\x16.google.protobuf.Empty"F\x82\xd3\xe4\x93\x02@*>/v1/{name=projects/*/locations/*/products/*/referenceImages/*}\x12\xc6\x01\n\x13ListReferenceImages\x12\x32.google.cloud.vision.v1.ListReferenceImagesRequest\x1a\x33.google.cloud.vision.v1.ListReferenceImagesResponse"F\x82\xd3\xe4\x93\x02@\x12>/v1/{parent=projects/*/locations/*/products/*}/referenceImages\x12\xb5\x01\n\x11GetReferenceImage\x12\x30.google.cloud.vision.v1.GetReferenceImageRequest\x1a&.google.cloud.vision.v1.ReferenceImage"F\x82\xd3\xe4\x93\x02@\x12>/v1/{name=projects/*/locations/*/products/*/referenceImages/*}\x12\xae\x01\n\x16\x41\x64\x64ProductToProductSet\x12\x35.google.cloud.vision.v1.AddProductToProductSetRequest\x1a\x16.google.protobuf.Empty"E\x82\xd3\xe4\x93\x02?":/v1/{name=projects/*/locations/*/productSets/*}:addProduct:\x01*\x12\xbb\x01\n\x1bRemoveProductFromProductSet\x12:.google.cloud.vision.v1.RemoveProductFromProductSetRequest\x1a\x16.google.protobuf.Empty"H\x82\xd3\xe4\x93\x02\x42"=/v1/{name=projects/*/locations/*/productSets/*}:removeProduct:\x01*\x12\xcf\x01\n\x18ListProductsInProductSet\x12\x37.google.cloud.vision.v1.ListProductsInProductSetRequest\x1a\x38.google.cloud.vision.v1.ListProductsInProductSetResponse"@\x82\xd3\xe4\x93\x02:\x12\x38/v1/{name=projects/*/locations/*/productSets/*}/product
s\x12\xa7\x01\n\x11ImportProductSets\x12\x30.google.cloud.vision.v1.ImportProductSetsRequest\x1a\x1d.google.longrunning.Operation"A\x82\xd3\xe4\x93\x02;"6/v1/{parent=projects/*/locations/*}/productSets:import:\x01*\x12\x9b\x01\n\rPurgeProducts\x12,.google.cloud.vision.v1.PurgeProductsRequest\x1a\x1d.google.longrunning.Operation"=\x82\xd3\xe4\x93\x02\x37"2/v1/{parent=projects/*/locations/*}/products:purge:\x01*\x1av\xca\x41\x15vision.googleapis.com\xd2\x41[https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-visionB\x81\x01\n\x1a\x63om.google.cloud.vision.v1B\x19ProductSearchServiceProtoP\x01Z"//v1/{parent=projects/*/locations/*}/productSets:\x0bproduct_set\xda\x41!parent,product_set,product_set_id\x12\xb4\x01\n\x0fListProductSets\x12..google.cloud.vision.v1.ListProductSetsRequest\x1a/.google.cloud.vision.v1.ListProductSetsResponse"@\x82\xd3\xe4\x93\x02\x31\x12//v1/{parent=projects/*/locations/*}/productSets\xda\x41\x06parent\x12\xa1\x01\n\rGetProductSet\x12,.google.cloud.vision.v1.GetProductSetRequest\x1a".google.cloud.vision.v1.ProductSet">\x82\xd3\xe4\x93\x02\x31\x12//v1/{name=projects/*/locations/*/productSets/*}\xda\x41\x04name\x12\xd3\x01\n\x10UpdateProductSet\x12/.google.cloud.vision.v1.UpdateProductSetRequest\x1a".google.cloud.vision.v1.ProductSet"j\x82\xd3\xe4\x93\x02J2;/v1/{product_set.name=projects/*/locations/*/productSets/*}:\x0bproduct_set\xda\x41\x17product_set,update_mask\x12\x9b\x01\n\x10\x44\x65leteProductSet\x12/.google.cloud.vision.v1.DeleteProductSetRequest\x1a\x16.google.protobuf.Empty">\x82\xd3\xe4\x93\x02\x31*//v1/{name=projects/*/locations/*/productSets/*}\xda\x41\x04name\x12\xb9\x01\n\rCreateProduct\x12,.google.cloud.vision.v1.CreateProductRequest\x1a\x1f.google.cloud.vision.v1.Product"Y\x82\xd3\xe4\x93\x02\x37",/v1/{parent=projects/*/locations/*}/products:\x07product\xda\x41\x19parent,product,product_id\x12\xa8\x01\n\x0cListProducts\x12+.google.cloud.vision.v1.ListProductsRequest\x1a,.google.cloud.vision.v1.L
istProductsResponse"=\x82\xd3\xe4\x93\x02.\x12,/v1/{parent=projects/*/locations/*}/products\xda\x41\x06parent\x12\x95\x01\n\nGetProduct\x12).google.cloud.vision.v1.GetProductRequest\x1a\x1f.google.cloud.vision.v1.Product";\x82\xd3\xe4\x93\x02.\x12,/v1/{name=projects/*/locations/*/products/*}\xda\x41\x04name\x12\xbb\x01\n\rUpdateProduct\x12,.google.cloud.vision.v1.UpdateProductRequest\x1a\x1f.google.cloud.vision.v1.Product"[\x82\xd3\xe4\x93\x02?24/v1/{product.name=projects/*/locations/*/products/*}:\x07product\xda\x41\x13product,update_mask\x12\x92\x01\n\rDeleteProduct\x12,.google.cloud.vision.v1.DeleteProductRequest\x1a\x16.google.protobuf.Empty";\x82\xd3\xe4\x93\x02.*,/v1/{name=projects/*/locations/*/products/*}\xda\x41\x04name\x12\xf9\x01\n\x14\x43reateReferenceImage\x12\x33.google.cloud.vision.v1.CreateReferenceImageRequest\x1a&.google.cloud.vision.v1.ReferenceImage"\x83\x01\x82\xd3\xe4\x93\x02Q">/v1/{parent=projects/*/locations/*/products/*}/referenceImages:\x0freference_image\xda\x41)parent,reference_image,reference_image_id\x12\xb2\x01\n\x14\x44\x65leteReferenceImage\x12\x33.google.cloud.vision.v1.DeleteReferenceImageRequest\x1a\x16.google.protobuf.Empty"M\x82\xd3\xe4\x93\x02@*>/v1/{name=projects/*/locations/*/products/*/referenceImages/*}\xda\x41\x04name\x12\xcf\x01\n\x13ListReferenceImages\x12\x32.google.cloud.vision.v1.ListReferenceImagesRequest\x1a\x33.google.cloud.vision.v1.ListReferenceImagesResponse"O\x82\xd3\xe4\x93\x02@\x12>/v1/{parent=projects/*/locations/*/products/*}/referenceImages\xda\x41\x06parent\x12\xbc\x01\n\x11GetReferenceImage\x12\x30.google.cloud.vision.v1.GetReferenceImageRequest\x1a&.google.cloud.vision.v1.ReferenceImage"M\x82\xd3\xe4\x93\x02@\x12>/v1/{name=projects/*/locations/*/products/*/referenceImages/*}\xda\x41\x04name\x12\xbd\x01\n\x16\x41\x64\x64ProductToProductSet\x12\x35.google.cloud.vision.v1.AddProductToProductSetRequest\x1a\x16.google.protobuf.Empty"T\x82\xd3\xe4\x93\x02?":/v1/{name=projects/*/locations/*/productSets/*}:addP
roduct:\x01*\xda\x41\x0cname,product\x12\xca\x01\n\x1bRemoveProductFromProductSet\x12:.google.cloud.vision.v1.RemoveProductFromProductSetRequest\x1a\x16.google.protobuf.Empty"W\x82\xd3\xe4\x93\x02\x42"=/v1/{name=projects/*/locations/*/productSets/*}:removeProduct:\x01*\xda\x41\x0cname,product\x12\xd6\x01\n\x18ListProductsInProductSet\x12\x37.google.cloud.vision.v1.ListProductsInProductSetRequest\x1a\x38.google.cloud.vision.v1.ListProductsInProductSetResponse"G\x82\xd3\xe4\x93\x02:\x12\x38/v1/{name=projects/*/locations/*/productSets/*}/products\xda\x41\x04name\x12\xf4\x01\n\x11ImportProductSets\x12\x30.google.cloud.vision.v1.ImportProductSetsRequest\x1a\x1d.google.longrunning.Operation"\x8d\x01\x82\xd3\xe4\x93\x02;"6/v1/{parent=projects/*/locations/*}/productSets:import:\x01*\xda\x41\x13parent,input_config\xca\x41\x33\n\x19ImportProductSetsResponse\x12\x16\x42\x61tchOperationMetadata\x12\xd6\x01\n\rPurgeProducts\x12,.google.cloud.vision.v1.PurgeProductsRequest\x1a\x1d.google.longrunning.Operation"x\x82\xd3\xe4\x93\x02\x37"2/v1/{parent=projects/*/locations/*}/products:purge:\x01*\xda\x41\x06parent\xca\x41/\n\x15google.protobuf.Empty\x12\x16\x42\x61tchOperationMetadata\x1av\xca\x41\x15vision.googleapis.com\xd2\x41[https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-visionB\x81\x01\n\x1a\x63om.google.cloud.vision.v1B\x19ProductSearchServiceProtoP\x01Z"//v1/{parent=projects/*/locations/*}/productSets:\013product_set' + '\202\323\344\223\002>"//v1/{parent=projects/*/locations/*}/productSets:\013product_set\332A!parent,product_set,product_set_id' ), ), _descriptor.MethodDescriptor( @@ -3061,7 +3136,7 @@ input_type=_LISTPRODUCTSETSREQUEST, output_type=_LISTPRODUCTSETSRESPONSE, serialized_options=_b( - "\202\323\344\223\0021\022//v1/{parent=projects/*/locations/*}/productSets" + "\202\323\344\223\0021\022//v1/{parent=projects/*/locations/*}/productSets\332A\006parent" ), ), _descriptor.MethodDescriptor( @@ -3072,7 +3147,7 @@ 
input_type=_GETPRODUCTSETREQUEST, output_type=_PRODUCTSET, serialized_options=_b( - "\202\323\344\223\0021\022//v1/{name=projects/*/locations/*/productSets/*}" + "\202\323\344\223\0021\022//v1/{name=projects/*/locations/*/productSets/*}\332A\004name" ), ), _descriptor.MethodDescriptor( @@ -3083,7 +3158,7 @@ input_type=_UPDATEPRODUCTSETREQUEST, output_type=_PRODUCTSET, serialized_options=_b( - "\202\323\344\223\002J2;/v1/{product_set.name=projects/*/locations/*/productSets/*}:\013product_set" + "\202\323\344\223\002J2;/v1/{product_set.name=projects/*/locations/*/productSets/*}:\013product_set\332A\027product_set,update_mask" ), ), _descriptor.MethodDescriptor( @@ -3094,7 +3169,7 @@ input_type=_DELETEPRODUCTSETREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, serialized_options=_b( - "\202\323\344\223\0021*//v1/{name=projects/*/locations/*/productSets/*}" + "\202\323\344\223\0021*//v1/{name=projects/*/locations/*/productSets/*}\332A\004name" ), ), _descriptor.MethodDescriptor( @@ -3105,7 +3180,7 @@ input_type=_CREATEPRODUCTREQUEST, output_type=_PRODUCT, serialized_options=_b( - '\202\323\344\223\0027",/v1/{parent=projects/*/locations/*}/products:\007product' + '\202\323\344\223\0027",/v1/{parent=projects/*/locations/*}/products:\007product\332A\031parent,product,product_id' ), ), _descriptor.MethodDescriptor( @@ -3116,7 +3191,7 @@ input_type=_LISTPRODUCTSREQUEST, output_type=_LISTPRODUCTSRESPONSE, serialized_options=_b( - "\202\323\344\223\002.\022,/v1/{parent=projects/*/locations/*}/products" + "\202\323\344\223\002.\022,/v1/{parent=projects/*/locations/*}/products\332A\006parent" ), ), _descriptor.MethodDescriptor( @@ -3127,7 +3202,7 @@ input_type=_GETPRODUCTREQUEST, output_type=_PRODUCT, serialized_options=_b( - "\202\323\344\223\002.\022,/v1/{name=projects/*/locations/*/products/*}" + "\202\323\344\223\002.\022,/v1/{name=projects/*/locations/*/products/*}\332A\004name" ), ), _descriptor.MethodDescriptor( @@ -3138,7 +3213,7 @@ 
input_type=_UPDATEPRODUCTREQUEST, output_type=_PRODUCT, serialized_options=_b( - "\202\323\344\223\002?24/v1/{product.name=projects/*/locations/*/products/*}:\007product" + "\202\323\344\223\002?24/v1/{product.name=projects/*/locations/*/products/*}:\007product\332A\023product,update_mask" ), ), _descriptor.MethodDescriptor( @@ -3149,7 +3224,7 @@ input_type=_DELETEPRODUCTREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, serialized_options=_b( - "\202\323\344\223\002.*,/v1/{name=projects/*/locations/*/products/*}" + "\202\323\344\223\002.*,/v1/{name=projects/*/locations/*/products/*}\332A\004name" ), ), _descriptor.MethodDescriptor( @@ -3160,7 +3235,7 @@ input_type=_CREATEREFERENCEIMAGEREQUEST, output_type=_REFERENCEIMAGE, serialized_options=_b( - '\202\323\344\223\002Q">/v1/{parent=projects/*/locations/*/products/*}/referenceImages:\017reference_image' + '\202\323\344\223\002Q">/v1/{parent=projects/*/locations/*/products/*}/referenceImages:\017reference_image\332A)parent,reference_image,reference_image_id' ), ), _descriptor.MethodDescriptor( @@ -3171,7 +3246,7 @@ input_type=_DELETEREFERENCEIMAGEREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, serialized_options=_b( - "\202\323\344\223\002@*>/v1/{name=projects/*/locations/*/products/*/referenceImages/*}" + "\202\323\344\223\002@*>/v1/{name=projects/*/locations/*/products/*/referenceImages/*}\332A\004name" ), ), _descriptor.MethodDescriptor( @@ -3182,7 +3257,7 @@ input_type=_LISTREFERENCEIMAGESREQUEST, output_type=_LISTREFERENCEIMAGESRESPONSE, serialized_options=_b( - "\202\323\344\223\002@\022>/v1/{parent=projects/*/locations/*/products/*}/referenceImages" + "\202\323\344\223\002@\022>/v1/{parent=projects/*/locations/*/products/*}/referenceImages\332A\006parent" ), ), _descriptor.MethodDescriptor( @@ -3193,7 +3268,7 @@ input_type=_GETREFERENCEIMAGEREQUEST, output_type=_REFERENCEIMAGE, serialized_options=_b( - 
"\202\323\344\223\002@\022>/v1/{name=projects/*/locations/*/products/*/referenceImages/*}" + "\202\323\344\223\002@\022>/v1/{name=projects/*/locations/*/products/*/referenceImages/*}\332A\004name" ), ), _descriptor.MethodDescriptor( @@ -3204,7 +3279,7 @@ input_type=_ADDPRODUCTTOPRODUCTSETREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, serialized_options=_b( - '\202\323\344\223\002?":/v1/{name=projects/*/locations/*/productSets/*}:addProduct:\001*' + '\202\323\344\223\002?":/v1/{name=projects/*/locations/*/productSets/*}:addProduct:\001*\332A\014name,product' ), ), _descriptor.MethodDescriptor( @@ -3215,7 +3290,7 @@ input_type=_REMOVEPRODUCTFROMPRODUCTSETREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, serialized_options=_b( - '\202\323\344\223\002B"=/v1/{name=projects/*/locations/*/productSets/*}:removeProduct:\001*' + '\202\323\344\223\002B"=/v1/{name=projects/*/locations/*/productSets/*}:removeProduct:\001*\332A\014name,product' ), ), _descriptor.MethodDescriptor( @@ -3226,7 +3301,7 @@ input_type=_LISTPRODUCTSINPRODUCTSETREQUEST, output_type=_LISTPRODUCTSINPRODUCTSETRESPONSE, serialized_options=_b( - "\202\323\344\223\002:\0228/v1/{name=projects/*/locations/*/productSets/*}/products" + "\202\323\344\223\002:\0228/v1/{name=projects/*/locations/*/productSets/*}/products\332A\004name" ), ), _descriptor.MethodDescriptor( @@ -3237,7 +3312,7 @@ input_type=_IMPORTPRODUCTSETSREQUEST, output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, serialized_options=_b( - '\202\323\344\223\002;"6/v1/{parent=projects/*/locations/*}/productSets:import:\001*' + '\202\323\344\223\002;"6/v1/{parent=projects/*/locations/*}/productSets:import:\001*\332A\023parent,input_config\312A3\n\031ImportProductSetsResponse\022\026BatchOperationMetadata' ), ), _descriptor.MethodDescriptor( @@ -3248,7 +3323,7 @@ input_type=_PURGEPRODUCTSREQUEST, output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, serialized_options=_b( - 
'\202\323\344\223\0027"2/v1/{parent=projects/*/locations/*}/products:purge:\001*' + '\202\323\344\223\0027"2/v1/{parent=projects/*/locations/*}/products:purge:\001*\332A\006parent\312A/\n\025google.protobuf.Empty\022\026BatchOperationMetadata' ), ), ], diff --git a/vision/google/cloud/vision_v1/proto/product_search_service_pb2_grpc.py b/vision/google/cloud/vision_v1/proto/product_search_service_pb2_grpc.py index 447fbf5c7313..d40d1a112f42 100644 --- a/vision/google/cloud/vision_v1/proto/product_search_service_pb2_grpc.py +++ b/vision/google/cloud/vision_v1/proto/product_search_service_pb2_grpc.py @@ -14,18 +14,16 @@ class ProductSearchStub(object): """Manages Products and ProductSets of reference images for use in product search. It uses the following resource model: - - The API has a collection of [ProductSet][google.cloud.vision.v1.ProductSet] - resources, named `projects/*/locations/*/productSets/*`, which acts as a way - to put different products into groups to limit identification. + - The API has a collection of [ProductSet][google.cloud.vision.v1.ProductSet] resources, named + `projects/*/locations/*/productSets/*`, which acts as a way to put different + products into groups to limit identification. In parallel, - - The API has a collection of [Product][google.cloud.vision.v1.Product] - resources, named + - The API has a collection of [Product][google.cloud.vision.v1.Product] resources, named `projects/*/locations/*/products/*` - - Each [Product][google.cloud.vision.v1.Product] has a collection of - [ReferenceImage][google.cloud.vision.v1.ReferenceImage] resources, named + - Each [Product][google.cloud.vision.v1.Product] has a collection of [ReferenceImage][google.cloud.vision.v1.ReferenceImage] resources, named `projects/*/locations/*/products/*/referenceImages/*` """ @@ -136,18 +134,16 @@ class ProductSearchServicer(object): """Manages Products and ProductSets of reference images for use in product search. 
It uses the following resource model: - - The API has a collection of [ProductSet][google.cloud.vision.v1.ProductSet] - resources, named `projects/*/locations/*/productSets/*`, which acts as a way - to put different products into groups to limit identification. + - The API has a collection of [ProductSet][google.cloud.vision.v1.ProductSet] resources, named + `projects/*/locations/*/productSets/*`, which acts as a way to put different + products into groups to limit identification. In parallel, - - The API has a collection of [Product][google.cloud.vision.v1.Product] - resources, named + - The API has a collection of [Product][google.cloud.vision.v1.Product] resources, named `projects/*/locations/*/products/*` - - Each [Product][google.cloud.vision.v1.Product] has a collection of - [ReferenceImage][google.cloud.vision.v1.ReferenceImage] resources, named + - Each [Product][google.cloud.vision.v1.Product] has a collection of [ReferenceImage][google.cloud.vision.v1.ReferenceImage] resources, named `projects/*/locations/*/products/*/referenceImages/*` """ @@ -378,8 +374,8 @@ def ImportProductSets(self, request, context): """Asynchronous API that imports a list of reference images to specified product sets based on a list of image information. - The [google.longrunning.Operation][google.longrunning.Operation] API can be - used to keep track of the progress and results of the request. + The [google.longrunning.Operation][google.longrunning.Operation] API can be used to keep track of the + progress and results of the request. `Operation.metadata` contains `BatchOperationMetadata`. (progress) `Operation.response` contains `ImportProductSetsResponse`. (results) @@ -413,8 +409,8 @@ def PurgeProducts(self, request, context): ProductSet, you must wait until the PurgeProducts operation has finished for that ProductSet. - The [google.longrunning.Operation][google.longrunning.Operation] API can be - used to keep track of the progress and results of the request. 
+ The [google.longrunning.Operation][google.longrunning.Operation] API can be used to keep track of the + progress and results of the request. `Operation.metadata` contains `BatchOperationMetadata`. (progress) """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) diff --git a/vision/google/cloud/vision_v1/proto/text_annotation.proto b/vision/google/cloud/vision_v1/proto/text_annotation.proto index 09459b90e69d..97f9c8fa9c2e 100644 --- a/vision/google/cloud/vision_v1/proto/text_annotation.proto +++ b/vision/google/cloud/vision_v1/proto/text_annotation.proto @@ -17,8 +17,8 @@ syntax = "proto3"; package google.cloud.vision.v1; -import "google/api/annotations.proto"; import "google/cloud/vision/v1/geometry.proto"; +import "google/api/annotations.proto"; option cc_enable_arenas = true; option go_package = "google.golang.org/genproto/googleapis/cloud/vision/v1;vision"; @@ -32,9 +32,8 @@ option objc_class_prefix = "GCVN"; // TextAnnotation -> Page -> Block -> Paragraph -> Word -> Symbol // Each structural component, starting from Page, may further have their own // properties. Properties describe detected languages, breaks etc.. Please refer -// to the -// [TextAnnotation.TextProperty][google.cloud.vision.v1.TextAnnotation.TextProperty] -// message definition below for more detail. +// to the [TextAnnotation.TextProperty][google.cloud.vision.v1.TextAnnotation.TextProperty] message definition below for more +// detail. message TextAnnotation { // Detected language for a structural component. 
message DetectedLanguage { diff --git a/vision/google/cloud/vision_v1/proto/text_annotation_pb2.py b/vision/google/cloud/vision_v1/proto/text_annotation_pb2.py index c5af2e0bbead..57627455811b 100644 --- a/vision/google/cloud/vision_v1/proto/text_annotation_pb2.py +++ b/vision/google/cloud/vision_v1/proto/text_annotation_pb2.py @@ -15,10 +15,10 @@ _sym_db = _symbol_database.Default() -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 from google.cloud.vision_v1.proto import ( geometry_pb2 as google_dot_cloud_dot_vision__v1_dot_proto_dot_geometry__pb2, ) +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 DESCRIPTOR = _descriptor.FileDescriptor( @@ -29,11 +29,11 @@ "\n\032com.google.cloud.vision.v1B\023TextAnnotationProtoP\001Z.google.cloud.vision.v1.TextAnnotation.DetectedBreak.BreakType\x12\x11\n\tis_prefix\x18\x02 \x01(\x08"c\n\tBreakType\x12\x0b\n\x07UNKNOWN\x10\x00\x12\t\n\x05SPACE\x10\x01\x12\x0e\n\nSURE_SPACE\x10\x02\x12\x12\n\x0e\x45OL_SURE_SPACE\x10\x03\x12\n\n\x06HYPHEN\x10\x04\x12\x0e\n\nLINE_BREAK\x10\x05\x1a\xb1\x01\n\x0cTextProperty\x12S\n\x12\x64\x65tected_languages\x18\x01 \x03(\x0b\x32\x37.google.cloud.vision.v1.TextAnnotation.DetectedLanguage\x12L\n\x0e\x64\x65tected_break\x18\x02 \x01(\x0b\x32\x34.google.cloud.vision.v1.TextAnnotation.DetectedBreak"\xaf\x01\n\x04Page\x12\x45\n\x08property\x18\x01 \x01(\x0b\x32\x33.google.cloud.vision.v1.TextAnnotation.TextProperty\x12\r\n\x05width\x18\x02 \x01(\x05\x12\x0e\n\x06height\x18\x03 \x01(\x05\x12-\n\x06\x62locks\x18\x04 \x03(\x0b\x32\x1d.google.cloud.vision.v1.Block\x12\x12\n\nconfidence\x18\x05 \x01(\x02"\xe6\x02\n\x05\x42lock\x12\x45\n\x08property\x18\x01 \x01(\x0b\x32\x33.google.cloud.vision.v1.TextAnnotation.TextProperty\x12:\n\x0c\x62ounding_box\x18\x02 \x01(\x0b\x32$.google.cloud.vision.v1.BoundingPoly\x12\x35\n\nparagraphs\x18\x03 \x03(\x0b\x32!.google.cloud.vision.v1.Paragraph\x12;\n\nblock_type\x18\x04 
\x01(\x0e\x32\'.google.cloud.vision.v1.Block.BlockType\x12\x12\n\nconfidence\x18\x05 \x01(\x02"R\n\tBlockType\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x08\n\x04TEXT\x10\x01\x12\t\n\x05TABLE\x10\x02\x12\x0b\n\x07PICTURE\x10\x03\x12\t\n\x05RULER\x10\x04\x12\x0b\n\x07\x42\x41RCODE\x10\x05"\xcf\x01\n\tParagraph\x12\x45\n\x08property\x18\x01 \x01(\x0b\x32\x33.google.cloud.vision.v1.TextAnnotation.TextProperty\x12:\n\x0c\x62ounding_box\x18\x02 \x01(\x0b\x32$.google.cloud.vision.v1.BoundingPoly\x12+\n\x05words\x18\x03 \x03(\x0b\x32\x1c.google.cloud.vision.v1.Word\x12\x12\n\nconfidence\x18\x04 \x01(\x02"\xce\x01\n\x04Word\x12\x45\n\x08property\x18\x01 \x01(\x0b\x32\x33.google.cloud.vision.v1.TextAnnotation.TextProperty\x12:\n\x0c\x62ounding_box\x18\x02 \x01(\x0b\x32$.google.cloud.vision.v1.BoundingPoly\x12/\n\x07symbols\x18\x03 \x03(\x0b\x32\x1e.google.cloud.vision.v1.Symbol\x12\x12\n\nconfidence\x18\x04 \x01(\x02"\xad\x01\n\x06Symbol\x12\x45\n\x08property\x18\x01 \x01(\x0b\x32\x33.google.cloud.vision.v1.TextAnnotation.TextProperty\x12:\n\x0c\x62ounding_box\x18\x02 \x01(\x0b\x32$.google.cloud.vision.v1.BoundingPoly\x12\x0c\n\x04text\x18\x03 \x01(\t\x12\x12\n\nconfidence\x18\x04 \x01(\x02\x42{\n\x1a\x63om.google.cloud.vision.v1B\x13TextAnnotationProtoP\x01Z.google.cloud.vision.v1.TextAnnotation.DetectedBreak.BreakType\x12\x11\n\tis_prefix\x18\x02 \x01(\x08"c\n\tBreakType\x12\x0b\n\x07UNKNOWN\x10\x00\x12\t\n\x05SPACE\x10\x01\x12\x0e\n\nSURE_SPACE\x10\x02\x12\x12\n\x0e\x45OL_SURE_SPACE\x10\x03\x12\n\n\x06HYPHEN\x10\x04\x12\x0e\n\nLINE_BREAK\x10\x05\x1a\xb1\x01\n\x0cTextProperty\x12S\n\x12\x64\x65tected_languages\x18\x01 \x03(\x0b\x32\x37.google.cloud.vision.v1.TextAnnotation.DetectedLanguage\x12L\n\x0e\x64\x65tected_break\x18\x02 \x01(\x0b\x32\x34.google.cloud.vision.v1.TextAnnotation.DetectedBreak"\xaf\x01\n\x04Page\x12\x45\n\x08property\x18\x01 \x01(\x0b\x32\x33.google.cloud.vision.v1.TextAnnotation.TextProperty\x12\r\n\x05width\x18\x02 \x01(\x05\x12\x0e\n\x06height\x18\x03 
\x01(\x05\x12-\n\x06\x62locks\x18\x04 \x03(\x0b\x32\x1d.google.cloud.vision.v1.Block\x12\x12\n\nconfidence\x18\x05 \x01(\x02"\xe6\x02\n\x05\x42lock\x12\x45\n\x08property\x18\x01 \x01(\x0b\x32\x33.google.cloud.vision.v1.TextAnnotation.TextProperty\x12:\n\x0c\x62ounding_box\x18\x02 \x01(\x0b\x32$.google.cloud.vision.v1.BoundingPoly\x12\x35\n\nparagraphs\x18\x03 \x03(\x0b\x32!.google.cloud.vision.v1.Paragraph\x12;\n\nblock_type\x18\x04 \x01(\x0e\x32\'.google.cloud.vision.v1.Block.BlockType\x12\x12\n\nconfidence\x18\x05 \x01(\x02"R\n\tBlockType\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x08\n\x04TEXT\x10\x01\x12\t\n\x05TABLE\x10\x02\x12\x0b\n\x07PICTURE\x10\x03\x12\t\n\x05RULER\x10\x04\x12\x0b\n\x07\x42\x41RCODE\x10\x05"\xcf\x01\n\tParagraph\x12\x45\n\x08property\x18\x01 \x01(\x0b\x32\x33.google.cloud.vision.v1.TextAnnotation.TextProperty\x12:\n\x0c\x62ounding_box\x18\x02 \x01(\x0b\x32$.google.cloud.vision.v1.BoundingPoly\x12+\n\x05words\x18\x03 \x03(\x0b\x32\x1c.google.cloud.vision.v1.Word\x12\x12\n\nconfidence\x18\x04 \x01(\x02"\xce\x01\n\x04Word\x12\x45\n\x08property\x18\x01 \x01(\x0b\x32\x33.google.cloud.vision.v1.TextAnnotation.TextProperty\x12:\n\x0c\x62ounding_box\x18\x02 \x01(\x0b\x32$.google.cloud.vision.v1.BoundingPoly\x12/\n\x07symbols\x18\x03 \x03(\x0b\x32\x1e.google.cloud.vision.v1.Symbol\x12\x12\n\nconfidence\x18\x04 \x01(\x02"\xad\x01\n\x06Symbol\x12\x45\n\x08property\x18\x01 \x01(\x0b\x32\x33.google.cloud.vision.v1.TextAnnotation.TextProperty\x12:\n\x0c\x62ounding_box\x18\x02 \x01(\x0b\x32$.google.cloud.vision.v1.BoundingPoly\x12\x0c\n\x04text\x18\x03 \x01(\t\x12\x12\n\nconfidence\x18\x04 \x01(\x02\x42{\n\x1a\x63om.google.cloud.vision.v1B\x13TextAnnotationProtoP\x01Z>> response = client.batch_annotate_files(requests) Args: - requests (list[Union[dict, ~google.cloud.vision_v1p4beta1.types.AnnotateFileRequest]]): The list of file annotation requests. Right now we support only one - AnnotateFileRequest in BatchAnnotateFilesRequest. 
+ requests (list[Union[dict, ~google.cloud.vision_v1p4beta1.types.AnnotateFileRequest]]): Required. The list of file annotation requests. Right now we support only + one AnnotateFileRequest in BatchAnnotateFilesRequest. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.vision_v1p4beta1.types.AnnotateFileRequest` @@ -363,7 +363,7 @@ def async_batch_annotate_images( >>> metadata = response.metadata() Args: - requests (list[Union[dict, ~google.cloud.vision_v1p4beta1.types.AnnotateImageRequest]]): Individual image annotation requests for this batch. + requests (list[Union[dict, ~google.cloud.vision_v1p4beta1.types.AnnotateImageRequest]]): Required. Individual image annotation requests for this batch. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.vision_v1p4beta1.types.AnnotateImageRequest` @@ -451,7 +451,7 @@ def async_batch_annotate_files( >>> metadata = response.metadata() Args: - requests (list[Union[dict, ~google.cloud.vision_v1p4beta1.types.AsyncAnnotateFileRequest]]): Individual async file annotation requests for this batch. + requests (list[Union[dict, ~google.cloud.vision_v1p4beta1.types.AsyncAnnotateFileRequest]]): Required. Individual async file annotation requests for this batch. 
If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.vision_v1p4beta1.types.AsyncAnnotateFileRequest` diff --git a/vision/google/cloud/vision_v1p4beta1/gapic/product_search_client.py b/vision/google/cloud/vision_v1p4beta1/gapic/product_search_client.py index f9543abd2de5..4d2197c8a0eb 100644 --- a/vision/google/cloud/vision_v1p4beta1/gapic/product_search_client.py +++ b/vision/google/cloud/vision_v1p4beta1/gapic/product_search_client.py @@ -31,6 +31,7 @@ from google.api_core import operations_v1 import google.api_core.page_iterator import google.api_core.path_template +import google.api_core.protobuf_helpers import grpc from google.cloud.vision_v1p4beta1.gapic import enums @@ -277,10 +278,10 @@ def create_product_set( >>> response = client.create_product_set(parent, product_set, product_set_id) Args: - parent (str): The project in which the ProductSet should be created. + parent (str): Required. The project in which the ProductSet should be created. Format is ``projects/PROJECT_ID/locations/LOC_ID``. - product_set (Union[dict, ~google.cloud.vision_v1p4beta1.types.ProductSet]): The ProductSet to create. + product_set (Union[dict, ~google.cloud.vision_v1p4beta1.types.ProductSet]): Required. The ProductSet to create. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.vision_v1p4beta1.types.ProductSet` @@ -376,7 +377,7 @@ def list_product_sets( ... pass Args: - parent (str): The project from which ProductSets should be listed. + parent (str): Required. The project from which ProductSets should be listed. Format is ``projects/PROJECT_ID/locations/LOC_ID``. page_size (int): The maximum number of resources contained in the @@ -472,7 +473,7 @@ def get_product_set( >>> response = client.get_product_set(name) Args: - name (str): Resource name of the ProductSet to get. + name (str): Required. Resource name of the ProductSet to get. 
Format is: ``projects/PROJECT_ID/locations/LOG_ID/productSets/PRODUCT_SET_ID`` @@ -556,7 +557,7 @@ def update_product_set( >>> response = client.update_product_set(product_set, update_mask) Args: - product_set (Union[dict, ~google.cloud.vision_v1p4beta1.types.ProductSet]): The ProductSet resource which replaces the one on the server. + product_set (Union[dict, ~google.cloud.vision_v1p4beta1.types.ProductSet]): Required. The ProductSet resource which replaces the one on the server. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.vision_v1p4beta1.types.ProductSet` @@ -629,10 +630,6 @@ def delete_product_set( The actual image files are not deleted from Google Cloud Storage. - Possible errors: - - - Returns NOT\_FOUND if the ProductSet does not exist. - Example: >>> from google.cloud import vision_v1p4beta1 >>> @@ -643,7 +640,7 @@ def delete_product_set( >>> client.delete_product_set(name) Args: - name (str): Resource name of the ProductSet to delete. + name (str): Required. Resource name of the ProductSet to delete. Format is: ``projects/PROJECT_ID/locations/LOC_ID/productSets/PRODUCT_SET_ID`` @@ -728,10 +725,10 @@ def create_product( >>> response = client.create_product(parent, product, product_id) Args: - parent (str): The project in which the Product should be created. + parent (str): Required. The project in which the Product should be created. Format is ``projects/PROJECT_ID/locations/LOC_ID``. - product (Union[dict, ~google.cloud.vision_v1p4beta1.types.Product]): The product to create. + product (Union[dict, ~google.cloud.vision_v1p4beta1.types.Product]): Required. The product to create. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.vision_v1p4beta1.types.Product` @@ -827,7 +824,8 @@ def list_products( ... pass Args: - parent (str): The project OR ProductSet from which Products should be listed. + parent (str): Required. 
The project OR ProductSet from which Products should be + listed. Format: ``projects/PROJECT_ID/locations/LOC_ID`` page_size (int): The maximum number of resources contained in the @@ -923,7 +921,7 @@ def get_product( >>> response = client.get_product(name) Args: - name (str): Resource name of the Product to get. + name (str): Required. Resource name of the Product to get. Format is: ``projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID`` retry (Optional[google.api_core.retry.Retry]): A retry object used @@ -1013,7 +1011,7 @@ def update_product( >>> response = client.update_product(product, update_mask) Args: - product (Union[dict, ~google.cloud.vision_v1p4beta1.types.Product]): The Product resource which replaces the one on the server. + product (Union[dict, ~google.cloud.vision_v1p4beta1.types.Product]): Required. The Product resource which replaces the one on the server. product.name is immutable. If a dict is provided, it must be of the same form as the protobuf @@ -1084,13 +1082,9 @@ def delete_product( """ Permanently deletes a product and its reference images. - Metadata of the product and all its images will be deleted right away, - but search queries against ProductSets containing the product may still - work until all related caches are refreshed. - - Possible errors: - - - Returns NOT\_FOUND if the product does not exist. + Metadata of the product and all its images will be deleted right away, but + search queries against ProductSets containing the product may still work + until all related caches are refreshed. Example: >>> from google.cloud import vision_v1p4beta1 @@ -1102,7 +1096,7 @@ def delete_product( >>> client.delete_product(name) Args: - name (str): Resource name of product to delete. + name (str): Required. Resource name of product to delete. 
Format is: ``projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID`` retry (Optional[google.api_core.retry.Retry]): A retry object used @@ -1198,10 +1192,11 @@ def create_reference_image( >>> response = client.create_reference_image(parent, reference_image, reference_image_id) Args: - parent (str): Resource name of the product in which to create the reference image. + parent (str): Required. Resource name of the product in which to create the reference + image. Format is ``projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID``. - reference_image (Union[dict, ~google.cloud.vision_v1p4beta1.types.ReferenceImage]): The reference image to create. + reference_image (Union[dict, ~google.cloud.vision_v1p4beta1.types.ReferenceImage]): Required. The reference image to create. If an image ID is specified, it is ignored. If a dict is provided, it must be of the same form as the protobuf @@ -1273,15 +1268,11 @@ def delete_reference_image( Permanently deletes a reference image. The image metadata will be deleted right away, but search queries - against ProductSets containing the image may still work until all - related caches are refreshed. + against ProductSets containing the image may still work until all related + caches are refreshed. The actual image files are not deleted from Google Cloud Storage. - Possible errors: - - - Returns NOT\_FOUND if the reference image does not exist. - Example: >>> from google.cloud import vision_v1p4beta1 >>> @@ -1292,7 +1283,7 @@ def delete_reference_image( >>> client.delete_reference_image(name) Args: - name (str): The resource name of the reference image to delete. + name (str): Required. The resource name of the reference image to delete. Format is: @@ -1381,7 +1372,7 @@ def list_reference_images( ... pass Args: - parent (str): Resource name of the product containing the reference images. + parent (str): Required. Resource name of the product containing the reference images. 
Format is ``projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID``. page_size (int): The maximum number of resources contained in the @@ -1477,7 +1468,7 @@ def get_reference_image( >>> response = client.get_reference_image(name) Args: - name (str): The resource name of the ReferenceImage to get. + name (str): Required. The resource name of the ReferenceImage to get. Format is: @@ -1561,11 +1552,12 @@ def add_product_to_product_set( >>> client.add_product_to_product_set(name, product) Args: - name (str): The resource name for the ProductSet to modify. + name (str): Required. The resource name for the ProductSet to modify. Format is: ``projects/PROJECT_ID/locations/LOC_ID/productSets/PRODUCT_SET_ID`` - product (str): The resource name for the Product to be added to this ProductSet. + product (str): Required. The resource name for the Product to be added to this + ProductSet. Format is: ``projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID`` retry (Optional[google.api_core.retry.Retry]): A retry object used @@ -1626,10 +1618,6 @@ def remove_product_from_product_set( """ Removes a Product from the specified ProductSet. - Possible errors: - - - Returns NOT\_FOUND If the Product is not found under the ProductSet. - Example: >>> from google.cloud import vision_v1p4beta1 >>> @@ -1643,11 +1631,12 @@ def remove_product_from_product_set( >>> client.remove_product_from_product_set(name, product) Args: - name (str): The resource name for the ProductSet to modify. + name (str): Required. The resource name for the ProductSet to modify. Format is: ``projects/PROJECT_ID/locations/LOC_ID/productSets/PRODUCT_SET_ID`` - product (str): The resource name for the Product to be removed from this ProductSet. + product (str): Required. The resource name for the Product to be removed from this + ProductSet. 
Format is: ``projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID`` retry (Optional[google.api_core.retry.Retry]): A retry object used @@ -1739,7 +1728,7 @@ def list_products_in_product_set( ... pass Args: - name (str): The ProductSet resource for which to retrieve Products. + name (str): Required. The ProductSet resource for which to retrieve Products. Format is: ``projects/PROJECT_ID/locations/LOC_ID/productSets/PRODUCT_SET_ID`` @@ -1857,10 +1846,10 @@ def import_product_sets( >>> metadata = response.metadata() Args: - parent (str): The project in which the ProductSets should be imported. + parent (str): Required. The project in which the ProductSets should be imported. Format is ``projects/PROJECT_ID/locations/LOC_ID``. - input_config (Union[dict, ~google.cloud.vision_v1p4beta1.types.ImportProductSetsInputConfig]): The input content for the list of requests. + input_config (Union[dict, ~google.cloud.vision_v1p4beta1.types.ImportProductSetsInputConfig]): Required. The input content for the list of requests. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.vision_v1p4beta1.types.ImportProductSetsInputConfig` @@ -1919,3 +1908,119 @@ def import_product_sets( product_search_service_pb2.ImportProductSetsResponse, metadata_type=product_search_service_pb2.BatchOperationMetadata, ) + + def purge_products( + self, + product_set_purge_config=None, + delete_orphan_products=None, + parent=None, + force=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Asynchronous API to delete all Products in a ProductSet or all Products + that are in no ProductSet. + + If a Product is a member of the specified ProductSet in addition to + other ProductSets, the Product will still be deleted. + + It is recommended to not delete the specified ProductSet until after + this operation has completed. 
It is also recommended to not add any of + the Products involved in the batch delete to a new ProductSet while this + operation is running because those Products may still end up deleted. + + It's not possible to undo the PurgeProducts operation. Therefore, it is + recommended to keep the csv files used in ImportProductSets (if that was + how you originally built the Product Set) before starting PurgeProducts, + in case you need to re-import the data after deletion. + + If the plan is to purge all of the Products from a ProductSet and then + re-use the empty ProductSet to re-import new Products into the empty + ProductSet, you must wait until the PurgeProducts operation has finished + for that ProductSet. + + The ``google.longrunning.Operation`` API can be used to keep track of + the progress and results of the request. ``Operation.metadata`` contains + ``BatchOperationMetadata``. (progress) + + Example: + >>> from google.cloud import vision_v1p4beta1 + >>> + >>> client = vision_v1p4beta1.ProductSearchClient() + >>> + >>> response = client.purge_products() + + Args: + product_set_purge_config (Union[dict, ~google.cloud.vision_v1p4beta1.types.ProductSetPurgeConfig]): Specify which ProductSet contains the Products to be deleted. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.vision_v1p4beta1.types.ProductSetPurgeConfig` + delete_orphan_products (bool): If delete\_orphan\_products is true, all Products that are not in any + ProductSet will be deleted. + parent (str): Required. The project and location in which the Products should be + deleted. + + Format is ``projects/PROJECT_ID/locations/LOC_ID``. + force (bool): The default value is false. Override this value to true to actually perform + the purge. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. 
+ timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.vision_v1p4beta1.types.Operation` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "purge_products" not in self._inner_api_calls: + self._inner_api_calls[ + "purge_products" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.purge_products, + default_retry=self._method_configs["PurgeProducts"].retry, + default_timeout=self._method_configs["PurgeProducts"].timeout, + client_info=self._client_info, + ) + + # Sanity check: We have some fields which are mutually exclusive; + # raise ValueError if more than one is sent. 
+ google.api_core.protobuf_helpers.check_oneof( + product_set_purge_config=product_set_purge_config, + delete_orphan_products=delete_orphan_products, + ) + + request = product_search_service_pb2.PurgeProductsRequest( + product_set_purge_config=product_set_purge_config, + delete_orphan_products=delete_orphan_products, + parent=parent, + force=force, + ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("parent", parent)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + return self._inner_api_calls["purge_products"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) diff --git a/vision/google/cloud/vision_v1p4beta1/gapic/product_search_client_config.py b/vision/google/cloud/vision_v1p4beta1/gapic/product_search_client_config.py index 97a89304553b..8b8f4f67c4f1 100644 --- a/vision/google/cloud/vision_v1p4beta1/gapic/product_search_client_config.py +++ b/vision/google/cloud/vision_v1p4beta1/gapic/product_search_client_config.py @@ -107,6 +107,11 @@ "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, + "PurgeProducts": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + }, }, } } diff --git a/vision/google/cloud/vision_v1p4beta1/gapic/transports/product_search_grpc_transport.py b/vision/google/cloud/vision_v1p4beta1/gapic/transports/product_search_grpc_transport.py index cd645bf50e0c..9cc5824dfa84 100644 --- a/vision/google/cloud/vision_v1p4beta1/gapic/transports/product_search_grpc_transport.py +++ b/vision/google/cloud/vision_v1p4beta1/gapic/transports/product_search_grpc_transport.py @@ -202,10 +202,6 @@ def delete_product_set(self): The actual image files are not deleted from Google Cloud Storage. - Possible errors: - - - Returns NOT\_FOUND if the ProductSet does not exist. 
- Returns: Callable: A callable which accepts the appropriate deserialized request object and returns a @@ -302,13 +298,9 @@ def delete_product(self): Permanently deletes a product and its reference images. - Metadata of the product and all its images will be deleted right away, - but search queries against ProductSets containing the product may still - work until all related caches are refreshed. - - Possible errors: - - - Returns NOT\_FOUND if the product does not exist. + Metadata of the product and all its images will be deleted right away, but + search queries against ProductSets containing the product may still work + until all related caches are refreshed. Returns: Callable: A callable which accepts the appropriate @@ -357,15 +349,11 @@ def delete_reference_image(self): Permanently deletes a reference image. The image metadata will be deleted right away, but search queries - against ProductSets containing the image may still work until all - related caches are refreshed. + against ProductSets containing the image may still work until all related + caches are refreshed. The actual image files are not deleted from Google Cloud Storage. - Possible errors: - - - Returns NOT\_FOUND if the reference image does not exist. - Returns: Callable: A callable which accepts the appropriate deserialized request object and returns a @@ -435,10 +423,6 @@ def remove_product_from_product_set(self): Removes a Product from the specified ProductSet. - Possible errors: - - - Returns NOT\_FOUND If the Product is not found under the ProductSet. - Returns: Callable: A callable which accepts the appropriate deserialized request object and returns a @@ -488,3 +472,39 @@ def import_product_sets(self): deserialized response object. """ return self._stubs["product_search_stub"].ImportProductSets + + @property + def purge_products(self): + """Return the gRPC stub for :meth:`ProductSearchClient.purge_products`. 
+ + Asynchronous API to delete all Products in a ProductSet or all Products + that are in no ProductSet. + + If a Product is a member of the specified ProductSet in addition to + other ProductSets, the Product will still be deleted. + + It is recommended to not delete the specified ProductSet until after + this operation has completed. It is also recommended to not add any of + the Products involved in the batch delete to a new ProductSet while this + operation is running because those Products may still end up deleted. + + It's not possible to undo the PurgeProducts operation. Therefore, it is + recommended to keep the csv files used in ImportProductSets (if that was + how you originally built the Product Set) before starting PurgeProducts, + in case you need to re-import the data after deletion. + + If the plan is to purge all of the Products from a ProductSet and then + re-use the empty ProductSet to re-import new Products into the empty + ProductSet, you must wait until the PurgeProducts operation has finished + for that ProductSet. + + The ``google.longrunning.Operation`` API can be used to keep track of + the progress and results of the request. ``Operation.metadata`` contains + ``BatchOperationMetadata``. (progress) + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["product_search_stub"].PurgeProducts diff --git a/vision/google/cloud/vision_v1p4beta1/proto/face.proto b/vision/google/cloud/vision_v1p4beta1/proto/face.proto new file mode 100644 index 000000000000..1786f26f0655 --- /dev/null +++ b/vision/google/cloud/vision_v1p4beta1/proto/face.proto @@ -0,0 +1,61 @@ +// Copyright 2019 Google LLC. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +syntax = "proto3"; + +package google.cloud.vision.v1p4beta1; + +import "google/api/annotations.proto"; +import "google/cloud/vision/v1p4beta1/geometry.proto"; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/cloud/vision/v1p4beta1;vision"; +option java_multiple_files = true; +option java_outer_classname = "CelebrityProto"; +option java_package = "com.google.cloud.vision.v1p4beta1"; +option objc_class_prefix = "GCVN"; + +// Parameters for a celebrity recognition request. +message FaceRecognitionParams { + // The resource names for one or more + // [CelebritySet][google.cloud.vision.v1p4beta1.CelebritySet]s. A celebrity + // set is preloaded and can be specified as "builtin/default". If this is + // specified, the algorithm will try to match the faces detected in the input + // image to the Celebrities in the CelebritySets. + repeated string celebrity_set = 1; +} + +// A Celebrity is a group of Faces with an identity. +message Celebrity { + // The resource name of the preloaded Celebrity. Has the format + // `builtin/{mid}`. + string name = 1; + + // The Celebrity's display name. + string display_name = 2; + + // The Celebrity's description. + string description = 3; +} + +// Information about a face's identity. +message FaceRecognitionResult { + // The [Celebrity][google.cloud.vision.v1p4beta1.Celebrity] that this face was + // matched to. + Celebrity celebrity = 1; + + // Recognition confidence. Range [0, 1]. 
+ float confidence = 2; +} diff --git a/vision/google/cloud/vision_v1p4beta1/proto/face_pb2.py b/vision/google/cloud/vision_v1p4beta1/proto/face_pb2.py new file mode 100644 index 000000000000..8fd9712fda23 --- /dev/null +++ b/vision/google/cloud/vision_v1p4beta1/proto/face_pb2.py @@ -0,0 +1,286 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/cloud/vision_v1p4beta1/proto/face.proto + +import sys + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.cloud.vision_v1p4beta1.proto import ( + geometry_pb2 as google_dot_cloud_dot_vision__v1p4beta1_dot_proto_dot_geometry__pb2, +) + + +DESCRIPTOR = _descriptor.FileDescriptor( + name="google/cloud/vision_v1p4beta1/proto/face.proto", + package="google.cloud.vision.v1p4beta1", + syntax="proto3", + serialized_options=_b( + "\n!com.google.cloud.vision.v1p4beta1B\016CelebrityProtoP\001ZCgoogle.golang.org/genproto/googleapis/cloud/vision/v1p4beta1;vision\370\001\001\242\002\004GCVN" + ), + serialized_pb=_b( + '\n.google/cloud/vision_v1p4beta1/proto/face.proto\x12\x1dgoogle.cloud.vision.v1p4beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x32google/cloud/vision_v1p4beta1/proto/geometry.proto".\n\x15\x46\x61\x63\x65RecognitionParams\x12\x15\n\rcelebrity_set\x18\x01 \x03(\t"D\n\tCelebrity\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t"h\n\x15\x46\x61\x63\x65RecognitionResult\x12;\n\tcelebrity\x18\x01 
\x01(\x0b\x32(.google.cloud.vision.v1p4beta1.Celebrity\x12\x12\n\nconfidence\x18\x02 \x01(\x02\x42\x84\x01\n!com.google.cloud.vision.v1p4beta1B\x0e\x43\x65lebrityProtoP\x01ZCgoogle.golang.org/genproto/googleapis/cloud/vision/v1p4beta1;vision\xf8\x01\x01\xa2\x02\x04GCVNb\x06proto3' + ), + dependencies=[ + google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_cloud_dot_vision__v1p4beta1_dot_proto_dot_geometry__pb2.DESCRIPTOR, + ], +) + + +_FACERECOGNITIONPARAMS = _descriptor.Descriptor( + name="FaceRecognitionParams", + full_name="google.cloud.vision.v1p4beta1.FaceRecognitionParams", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="celebrity_set", + full_name="google.cloud.vision.v1p4beta1.FaceRecognitionParams.celebrity_set", + index=0, + number=1, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=163, + serialized_end=209, +) + + +_CELEBRITY = _descriptor.Descriptor( + name="Celebrity", + full_name="google.cloud.vision.v1p4beta1.Celebrity", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.cloud.vision.v1p4beta1.Celebrity.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="display_name", + full_name="google.cloud.vision.v1p4beta1.Celebrity.display_name", + index=1, + 
number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="description", + full_name="google.cloud.vision.v1p4beta1.Celebrity.description", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=211, + serialized_end=279, +) + + +_FACERECOGNITIONRESULT = _descriptor.Descriptor( + name="FaceRecognitionResult", + full_name="google.cloud.vision.v1p4beta1.FaceRecognitionResult", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="celebrity", + full_name="google.cloud.vision.v1p4beta1.FaceRecognitionResult.celebrity", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="confidence", + full_name="google.cloud.vision.v1p4beta1.FaceRecognitionResult.confidence", + index=1, + number=2, + type=2, + cpp_type=6, + label=1, + has_default_value=False, + default_value=float(0), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + 
is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=281, + serialized_end=385, +) + +_FACERECOGNITIONRESULT.fields_by_name["celebrity"].message_type = _CELEBRITY +DESCRIPTOR.message_types_by_name["FaceRecognitionParams"] = _FACERECOGNITIONPARAMS +DESCRIPTOR.message_types_by_name["Celebrity"] = _CELEBRITY +DESCRIPTOR.message_types_by_name["FaceRecognitionResult"] = _FACERECOGNITIONRESULT +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +FaceRecognitionParams = _reflection.GeneratedProtocolMessageType( + "FaceRecognitionParams", + (_message.Message,), + dict( + DESCRIPTOR=_FACERECOGNITIONPARAMS, + __module__="google.cloud.vision_v1p4beta1.proto.face_pb2", + __doc__="""Parameters for a celebrity recognition request. + + + Attributes: + celebrity_set: + The resource names for one or more + [CelebritySet][google.cloud.vision.v1p4beta1.CelebritySet]s. A + celebrity set is preloaded and can be specified as + "builtin/default". If this is specified, the algorithm will + try to match the faces detected in the input image to the + Celebrities in the CelebritySets. + """, + # @@protoc_insertion_point(class_scope:google.cloud.vision.v1p4beta1.FaceRecognitionParams) + ), +) +_sym_db.RegisterMessage(FaceRecognitionParams) + +Celebrity = _reflection.GeneratedProtocolMessageType( + "Celebrity", + (_message.Message,), + dict( + DESCRIPTOR=_CELEBRITY, + __module__="google.cloud.vision_v1p4beta1.proto.face_pb2", + __doc__="""A Celebrity is a group of Faces with an identity. + + + Attributes: + name: + The resource name of the preloaded Celebrity. Has the format + ``builtin/{mid}``. + display_name: + The Celebrity's display name. + description: + The Celebrity's description. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.vision.v1p4beta1.Celebrity) + ), +) +_sym_db.RegisterMessage(Celebrity) + +FaceRecognitionResult = _reflection.GeneratedProtocolMessageType( + "FaceRecognitionResult", + (_message.Message,), + dict( + DESCRIPTOR=_FACERECOGNITIONRESULT, + __module__="google.cloud.vision_v1p4beta1.proto.face_pb2", + __doc__="""Information about a face's identity. + + + Attributes: + celebrity: + The [Celebrity][google.cloud.vision.v1p4beta1.Celebrity] that + this face was matched to. + confidence: + Recognition confidence. Range [0, 1]. + """, + # @@protoc_insertion_point(class_scope:google.cloud.vision.v1p4beta1.FaceRecognitionResult) + ), +) +_sym_db.RegisterMessage(FaceRecognitionResult) + + +DESCRIPTOR._options = None +# @@protoc_insertion_point(module_scope) diff --git a/vision/google/cloud/vision_v1p4beta1/proto/face_pb2_grpc.py b/vision/google/cloud/vision_v1p4beta1/proto/face_pb2_grpc.py new file mode 100644 index 000000000000..07cb78fe03a9 --- /dev/null +++ b/vision/google/cloud/vision_v1p4beta1/proto/face_pb2_grpc.py @@ -0,0 +1,2 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +import grpc diff --git a/vision/google/cloud/vision_v1p4beta1/proto/geometry.proto b/vision/google/cloud/vision_v1p4beta1/proto/geometry.proto index b0abd329c026..18877188faea 100644 --- a/vision/google/cloud/vision_v1p4beta1/proto/geometry.proto +++ b/vision/google/cloud/vision_v1p4beta1/proto/geometry.proto @@ -1,4 +1,4 @@ -// Copyright 2018 Google LLC. +// Copyright 2019 Google LLC. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
diff --git a/vision/google/cloud/vision_v1p4beta1/proto/image_annotator.proto b/vision/google/cloud/vision_v1p4beta1/proto/image_annotator.proto index 30318044a676..e3553b0de02c 100644 --- a/vision/google/cloud/vision_v1p4beta1/proto/image_annotator.proto +++ b/vision/google/cloud/vision_v1p4beta1/proto/image_annotator.proto @@ -1,4 +1,4 @@ -// Copyright 2018 Google LLC. +// Copyright 2019 Google LLC. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -18,6 +18,9 @@ syntax = "proto3"; package google.cloud.vision.v1p4beta1; import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/cloud/vision/v1p4beta1/face.proto"; import "google/cloud/vision/v1p4beta1/geometry.proto"; import "google/cloud/vision/v1p4beta1/product_search.proto"; import "google/cloud/vision/v1p4beta1/text_annotation.proto"; @@ -40,6 +43,11 @@ option objc_class_prefix = "GCVN"; // images, such as face, landmark, logo, label, and text detection. The // ImageAnnotator service returns detected entities from the images. service ImageAnnotator { + option (google.api.default_host) = "vision.googleapis.com"; + option (google.api.oauth_scopes) = + "https://www.googleapis.com/auth/cloud-platform," + "https://www.googleapis.com/auth/cloud-vision"; + // Run image detection and annotation for a batch of images. rpc BatchAnnotateImages(BatchAnnotateImagesRequest) returns (BatchAnnotateImagesResponse) { @@ -47,6 +55,7 @@ service ImageAnnotator { post: "/v1p4beta1/images:annotate" body: "*" }; + option (google.api.method_signature) = "requests"; } // Service that performs image detection and annotation for a batch of files. @@ -62,6 +71,7 @@ service ImageAnnotator { post: "/v1p4beta1/files:annotate" body: "*" }; + option (google.api.method_signature) = "requests"; } // Run asynchronous image detection and annotation for a list of images. 
@@ -79,6 +89,11 @@ service ImageAnnotator { post: "/v1p4beta1/images:asyncBatchAnnotate" body: "*" }; + option (google.api.method_signature) = "requests,output_config"; + option (google.longrunning.operation_info) = { + response_type: "AsyncBatchAnnotateImagesResponse" + metadata_type: "OperationMetadata" + }; } // Run asynchronous image detection and annotation for a list of generic @@ -93,6 +108,11 @@ service ImageAnnotator { post: "/v1p4beta1/files:asyncBatchAnnotate" body: "*" }; + option (google.api.method_signature) = "requests"; + option (google.longrunning.operation_info) = { + response_type: "AsyncBatchAnnotateFilesResponse" + metadata_type: "OperationMetadata" + }; } } @@ -209,19 +229,19 @@ enum Likelihood { // Unknown likelihood. UNKNOWN = 0; - // It is very unlikely that the image belongs to the specified vertical. + // It is very unlikely. VERY_UNLIKELY = 1; - // It is unlikely that the image belongs to the specified vertical. + // It is unlikely. UNLIKELY = 2; - // It is possible that the image belongs to the specified vertical. + // It is possible. POSSIBLE = 3; - // It is likely that the image belongs to the specified vertical. + // It is likely. LIKELY = 4; - // It is very likely that the image belongs to the specified vertical. + // It is very likely. VERY_LIKELY = 5; } @@ -407,6 +427,13 @@ message FaceAnnotation { // Headwear likelihood. Likelihood headwear_likelihood = 15; + + // Additional recognition information. Only computed if + // image_context.face_recognition_params is provided, **and** a match is found + // to a [Celebrity][google.cloud.vision.v1p4beta1.Celebrity] in the input + // [CelebritySet][google.cloud.vision.v1p4beta1.CelebritySet]. This field is + // sorted in order of decreasing confidence values. + repeated FaceRecognitionResult recognition_result = 16; } // Detected entity location information. @@ -611,6 +638,9 @@ message ImageContext { // Parameters for crop hints annotation request. 
CropHintsParams crop_hints_params = 4; + // Parameters for face recognition. + FaceRecognitionParams face_recognition_params = 10; + // Parameters for product search. ProductSearchParams product_search_params = 5; @@ -694,19 +724,6 @@ message AnnotateImageResponse { ImageAnnotationContext context = 21; } -// Response to a single file annotation request. A file may contain one or more -// images, which individually have their own responses. -message AnnotateFileResponse { - // Information about the file for which this response is generated. - InputConfig input_config = 1; - - // Individual responses to images found within the file. - repeated AnnotateImageResponse responses = 2; - - // This field gives the total number of pages in the file. - int32 total_pages = 3; -} - // Multiple image annotation requests are batched into a single service call. message BatchAnnotateImagesRequest { // Individual image annotation requests for this batch. @@ -747,11 +764,30 @@ message AnnotateFileRequest { repeated int32 pages = 4; } +// Response to a single file annotation request. A file may contain one or more +// images, which individually have their own responses. +message AnnotateFileResponse { + // Information about the file for which this response is generated. + InputConfig input_config = 1; + + // Individual responses to images found within the file. This field will be + // empty if the `error` field is set. + repeated AnnotateImageResponse responses = 2; + + // This field gives the total number of pages in the file. + int32 total_pages = 3; + + // If set, represents the error message for the failed request. The + // `responses` field will not be set in this case. + google.rpc.Status error = 4; +} + // A list of requests to annotate files using the BatchAnnotateFiles API. message BatchAnnotateFilesRequest { - // The list of file annotation requests. Right now we support only one - // AnnotateFileRequest in BatchAnnotateFilesRequest. 
- repeated AnnotateFileRequest requests = 1; + // Required. The list of file annotation requests. Right now we support only + // one AnnotateFileRequest in BatchAnnotateFilesRequest. + repeated AnnotateFileRequest requests = 1 + [(google.api.field_behavior) = REQUIRED]; } // A list of file annotation responses. @@ -784,11 +820,12 @@ message AsyncAnnotateFileResponse { // Request for async image annotation for a list of images. message AsyncBatchAnnotateImagesRequest { - // Individual image annotation requests for this batch. - repeated AnnotateImageRequest requests = 1; + // Required. Individual image annotation requests for this batch. + repeated AnnotateImageRequest requests = 1 + [(google.api.field_behavior) = REQUIRED]; // Required. The desired output location and metadata (e.g. format). - OutputConfig output_config = 2; + OutputConfig output_config = 2 [(google.api.field_behavior) = REQUIRED]; } // Response to an async batch image annotation request. @@ -800,8 +837,9 @@ message AsyncBatchAnnotateImagesResponse { // Multiple async file annotation requests are batched into a single service // call. message AsyncBatchAnnotateFilesRequest { - // Individual async file annotation requests for this batch. - repeated AsyncAnnotateFileRequest requests = 1; + // Required. Individual async file annotation requests for this batch. + repeated AsyncAnnotateFileRequest requests = 1 + [(google.api.field_behavior) = REQUIRED]; } // Response to an async batch file annotation request. @@ -824,8 +862,8 @@ message InputConfig { // not work for AsyncBatchAnnotateFiles requests. bytes content = 3; - // The type of the file. Currently only "application/pdf" and "image/tiff" - // are supported. Wildcards are not supported. + // The type of the file. Currently only "application/pdf", "image/tiff" and + // "image/gif" are supported. Wildcards are not supported. 
string mime_type = 2; } @@ -857,16 +895,23 @@ message GcsSource { // The Google Cloud Storage location where the output will be written to. message GcsDestination { - // Google Cloud Storage URI where the results will be stored. Results will - // be in JSON format and preceded by its corresponding input URI. This field - // can either represent a single file, or a prefix for multiple outputs. - // Prefixes must end in a `/`. + // Google Cloud Storage URI prefix where the results will be stored. Results + // will be in JSON format and preceded by its corresponding input URI prefix. + // This field can either represent a gcs file prefix or gcs directory. In + // either case, the uri should be unique because in order to get all of the + // output files, you will need to do a wildcard gcs search on the uri prefix + // you provide. // // Examples: // - // * File: gs://bucket-name/filename.json - // * Prefix: gs://bucket-name/prefix/here/ - // * File: gs://bucket-name/prefix/here + // * File Prefix: gs://bucket-name/here/filenameprefix The output files + // will be created in gs://bucket-name/here/ and the names of the + // output files will begin with "filenameprefix". + // + // * Directory Prefix: gs://bucket-name/some/location/ The output files + // will be created in gs://bucket-name/some/location/ and the names of the + // output files could be anything because there was no filename prefix + // specified. // // If multiple outputs, each response is still AnnotateFileResponse, each of // which contains some subset of the full list of AnnotateImageResponse. 
diff --git a/vision/google/cloud/vision_v1p4beta1/proto/image_annotator_pb2.py b/vision/google/cloud/vision_v1p4beta1/proto/image_annotator_pb2.py index a162a9f4d9d6..5d0bb3d90365 100644 --- a/vision/google/cloud/vision_v1p4beta1/proto/image_annotator_pb2.py +++ b/vision/google/cloud/vision_v1p4beta1/proto/image_annotator_pb2.py @@ -17,6 +17,11 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.cloud.vision_v1p4beta1.proto import ( + face_pb2 as google_dot_cloud_dot_vision__v1p4beta1_dot_proto_dot_face__pb2, +) from google.cloud.vision_v1p4beta1.proto import ( geometry_pb2 as google_dot_cloud_dot_vision__v1p4beta1_dot_proto_dot_geometry__pb2, ) @@ -47,10 +52,13 @@ "\n!com.google.cloud.vision.v1p4beta1B\023ImageAnnotatorProtoP\001ZCgoogle.golang.org/genproto/googleapis/cloud/vision/v1p4beta1;vision\370\001\001\242\002\004GCVN" ), serialized_pb=_b( - '\n9google/cloud/vision_v1p4beta1/proto/image_annotator.proto\x12\x1dgoogle.cloud.vision.v1p4beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x32google/cloud/vision_v1p4beta1/proto/geometry.proto\x1a\x38google/cloud/vision_v1p4beta1/proto/product_search.proto\x1a\x39google/cloud/vision_v1p4beta1/proto/text_annotation.proto\x1a\x37google/cloud/vision_v1p4beta1/proto/web_detection.proto\x1a#google/longrunning/operations.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto\x1a\x17google/type/color.proto\x1a\x18google/type/latlng.proto"\x8e\x03\n\x07\x46\x65\x61ture\x12\x39\n\x04type\x18\x01 \x01(\x0e\x32+.google.cloud.vision.v1p4beta1.Feature.Type\x12\x13\n\x0bmax_results\x18\x02 \x01(\x05\x12\r\n\x05model\x18\x03 
\x01(\t"\xa3\x02\n\x04Type\x12\x14\n\x10TYPE_UNSPECIFIED\x10\x00\x12\x12\n\x0e\x46\x41\x43\x45_DETECTION\x10\x01\x12\x16\n\x12LANDMARK_DETECTION\x10\x02\x12\x12\n\x0eLOGO_DETECTION\x10\x03\x12\x13\n\x0fLABEL_DETECTION\x10\x04\x12\x12\n\x0eTEXT_DETECTION\x10\x05\x12\x1b\n\x17\x44OCUMENT_TEXT_DETECTION\x10\x0b\x12\x19\n\x15SAFE_SEARCH_DETECTION\x10\x06\x12\x14\n\x10IMAGE_PROPERTIES\x10\x07\x12\x0e\n\nCROP_HINTS\x10\t\x12\x11\n\rWEB_DETECTION\x10\n\x12\x12\n\x0ePRODUCT_SEARCH\x10\x0c\x12\x17\n\x13OBJECT_LOCALIZATION\x10\x13"7\n\x0bImageSource\x12\x15\n\rgcs_image_uri\x18\x01 \x01(\t\x12\x11\n\timage_uri\x18\x02 \x01(\t"T\n\x05Image\x12\x0f\n\x07\x63ontent\x18\x01 \x01(\x0c\x12:\n\x06source\x18\x02 \x01(\x0b\x32*.google.cloud.vision.v1p4beta1.ImageSource"\x9b\x0e\n\x0e\x46\x61\x63\x65\x41nnotation\x12\x42\n\rbounding_poly\x18\x01 \x01(\x0b\x32+.google.cloud.vision.v1p4beta1.BoundingPoly\x12\x45\n\x10\x66\x64_bounding_poly\x18\x02 \x01(\x0b\x32+.google.cloud.vision.v1p4beta1.BoundingPoly\x12I\n\tlandmarks\x18\x03 \x03(\x0b\x32\x36.google.cloud.vision.v1p4beta1.FaceAnnotation.Landmark\x12\x12\n\nroll_angle\x18\x04 \x01(\x02\x12\x11\n\tpan_angle\x18\x05 \x01(\x02\x12\x12\n\ntilt_angle\x18\x06 \x01(\x02\x12\x1c\n\x14\x64\x65tection_confidence\x18\x07 \x01(\x02\x12\x1e\n\x16landmarking_confidence\x18\x08 \x01(\x02\x12\x41\n\x0ejoy_likelihood\x18\t \x01(\x0e\x32).google.cloud.vision.v1p4beta1.Likelihood\x12\x44\n\x11sorrow_likelihood\x18\n \x01(\x0e\x32).google.cloud.vision.v1p4beta1.Likelihood\x12\x43\n\x10\x61nger_likelihood\x18\x0b \x01(\x0e\x32).google.cloud.vision.v1p4beta1.Likelihood\x12\x46\n\x13surprise_likelihood\x18\x0c \x01(\x0e\x32).google.cloud.vision.v1p4beta1.Likelihood\x12K\n\x18under_exposed_likelihood\x18\r \x01(\x0e\x32).google.cloud.vision.v1p4beta1.Likelihood\x12\x45\n\x12\x62lurred_likelihood\x18\x0e \x01(\x0e\x32).google.cloud.vision.v1p4beta1.Likelihood\x12\x46\n\x13headwear_likelihood\x18\x0f 
\x01(\x0e\x32).google.cloud.vision.v1p4beta1.Likelihood\x1a\xc7\x07\n\x08Landmark\x12I\n\x04type\x18\x03 \x01(\x0e\x32;.google.cloud.vision.v1p4beta1.FaceAnnotation.Landmark.Type\x12\x39\n\x08position\x18\x04 \x01(\x0b\x32\'.google.cloud.vision.v1p4beta1.Position"\xb4\x06\n\x04Type\x12\x14\n\x10UNKNOWN_LANDMARK\x10\x00\x12\x0c\n\x08LEFT_EYE\x10\x01\x12\r\n\tRIGHT_EYE\x10\x02\x12\x18\n\x14LEFT_OF_LEFT_EYEBROW\x10\x03\x12\x19\n\x15RIGHT_OF_LEFT_EYEBROW\x10\x04\x12\x19\n\x15LEFT_OF_RIGHT_EYEBROW\x10\x05\x12\x1a\n\x16RIGHT_OF_RIGHT_EYEBROW\x10\x06\x12\x19\n\x15MIDPOINT_BETWEEN_EYES\x10\x07\x12\x0c\n\x08NOSE_TIP\x10\x08\x12\r\n\tUPPER_LIP\x10\t\x12\r\n\tLOWER_LIP\x10\n\x12\x0e\n\nMOUTH_LEFT\x10\x0b\x12\x0f\n\x0bMOUTH_RIGHT\x10\x0c\x12\x10\n\x0cMOUTH_CENTER\x10\r\x12\x15\n\x11NOSE_BOTTOM_RIGHT\x10\x0e\x12\x14\n\x10NOSE_BOTTOM_LEFT\x10\x0f\x12\x16\n\x12NOSE_BOTTOM_CENTER\x10\x10\x12\x19\n\x15LEFT_EYE_TOP_BOUNDARY\x10\x11\x12\x19\n\x15LEFT_EYE_RIGHT_CORNER\x10\x12\x12\x1c\n\x18LEFT_EYE_BOTTOM_BOUNDARY\x10\x13\x12\x18\n\x14LEFT_EYE_LEFT_CORNER\x10\x14\x12\x1a\n\x16RIGHT_EYE_TOP_BOUNDARY\x10\x15\x12\x1a\n\x16RIGHT_EYE_RIGHT_CORNER\x10\x16\x12\x1d\n\x19RIGHT_EYE_BOTTOM_BOUNDARY\x10\x17\x12\x19\n\x15RIGHT_EYE_LEFT_CORNER\x10\x18\x12\x1f\n\x1bLEFT_EYEBROW_UPPER_MIDPOINT\x10\x19\x12 \n\x1cRIGHT_EYEBROW_UPPER_MIDPOINT\x10\x1a\x12\x14\n\x10LEFT_EAR_TRAGION\x10\x1b\x12\x15\n\x11RIGHT_EAR_TRAGION\x10\x1c\x12\x12\n\x0eLEFT_EYE_PUPIL\x10\x1d\x12\x13\n\x0fRIGHT_EYE_PUPIL\x10\x1e\x12\x15\n\x11\x46OREHEAD_GLABELLA\x10\x1f\x12\x11\n\rCHIN_GNATHION\x10 \x12\x14\n\x10\x43HIN_LEFT_GONION\x10!\x12\x15\n\x11\x43HIN_RIGHT_GONION\x10""4\n\x0cLocationInfo\x12$\n\x07lat_lng\x18\x01 \x01(\x0b\x32\x13.google.type.LatLng"=\n\x08Property\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t\x12\x14\n\x0cuint64_value\x18\x03 \x01(\x04"\xc0\x02\n\x10\x45ntityAnnotation\x12\x0b\n\x03mid\x18\x01 \x01(\t\x12\x0e\n\x06locale\x18\x02 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 
\x01(\t\x12\r\n\x05score\x18\x04 \x01(\x02\x12\x16\n\nconfidence\x18\x05 \x01(\x02\x42\x02\x18\x01\x12\x12\n\ntopicality\x18\x06 \x01(\x02\x12\x42\n\rbounding_poly\x18\x07 \x01(\x0b\x32+.google.cloud.vision.v1p4beta1.BoundingPoly\x12>\n\tlocations\x18\x08 \x03(\x0b\x32+.google.cloud.vision.v1p4beta1.LocationInfo\x12;\n\nproperties\x18\t \x03(\x0b\x32\'.google.cloud.vision.v1p4beta1.Property"\xa0\x01\n\x19LocalizedObjectAnnotation\x12\x0b\n\x03mid\x18\x01 \x01(\t\x12\x15\n\rlanguage_code\x18\x02 \x01(\t\x12\x0c\n\x04name\x18\x03 \x01(\t\x12\r\n\x05score\x18\x04 \x01(\x02\x12\x42\n\rbounding_poly\x18\x05 \x01(\x0b\x32+.google.cloud.vision.v1p4beta1.BoundingPoly"\xbc\x02\n\x14SafeSearchAnnotation\x12\x38\n\x05\x61\x64ult\x18\x01 \x01(\x0e\x32).google.cloud.vision.v1p4beta1.Likelihood\x12\x38\n\x05spoof\x18\x02 \x01(\x0e\x32).google.cloud.vision.v1p4beta1.Likelihood\x12:\n\x07medical\x18\x03 \x01(\x0e\x32).google.cloud.vision.v1p4beta1.Likelihood\x12;\n\x08violence\x18\x04 \x01(\x0e\x32).google.cloud.vision.v1p4beta1.Likelihood\x12\x37\n\x04racy\x18\t \x01(\x0e\x32).google.cloud.vision.v1p4beta1.Likelihood"a\n\x0bLatLongRect\x12(\n\x0bmin_lat_lng\x18\x01 \x01(\x0b\x32\x13.google.type.LatLng\x12(\n\x0bmax_lat_lng\x18\x02 \x01(\x0b\x32\x13.google.type.LatLng"U\n\tColorInfo\x12!\n\x05\x63olor\x18\x01 \x01(\x0b\x32\x12.google.type.Color\x12\r\n\x05score\x18\x02 \x01(\x02\x12\x16\n\x0epixel_fraction\x18\x03 \x01(\x02"T\n\x18\x44ominantColorsAnnotation\x12\x38\n\x06\x63olors\x18\x01 \x03(\x0b\x32(.google.cloud.vision.v1p4beta1.ColorInfo"c\n\x0fImageProperties\x12P\n\x0f\x64ominant_colors\x18\x01 \x01(\x0b\x32\x37.google.cloud.vision.v1p4beta1.DominantColorsAnnotation"\x7f\n\x08\x43ropHint\x12\x42\n\rbounding_poly\x18\x01 \x01(\x0b\x32+.google.cloud.vision.v1p4beta1.BoundingPoly\x12\x12\n\nconfidence\x18\x02 \x01(\x02\x12\x1b\n\x13importance_fraction\x18\x03 \x01(\x02"R\n\x13\x43ropHintsAnnotation\x12;\n\ncrop_hints\x18\x01 
\x03(\x0b\x32\'.google.cloud.vision.v1p4beta1.CropHint"(\n\x0f\x43ropHintsParams\x12\x15\n\raspect_ratios\x18\x01 \x03(\x02"1\n\x12WebDetectionParams\x12\x1b\n\x13include_geo_results\x18\x02 \x01(\x08"\xd8\x02\n\x0cImageContext\x12\x41\n\rlat_long_rect\x18\x01 \x01(\x0b\x32*.google.cloud.vision.v1p4beta1.LatLongRect\x12\x16\n\x0elanguage_hints\x18\x02 \x03(\t\x12I\n\x11\x63rop_hints_params\x18\x04 \x01(\x0b\x32..google.cloud.vision.v1p4beta1.CropHintsParams\x12Q\n\x15product_search_params\x18\x05 \x01(\x0b\x32\x32.google.cloud.vision.v1p4beta1.ProductSearchParams\x12O\n\x14web_detection_params\x18\x06 \x01(\x0b\x32\x31.google.cloud.vision.v1p4beta1.WebDetectionParams"\xc9\x01\n\x14\x41nnotateImageRequest\x12\x33\n\x05image\x18\x01 \x01(\x0b\x32$.google.cloud.vision.v1p4beta1.Image\x12\x38\n\x08\x66\x65\x61tures\x18\x02 \x03(\x0b\x32&.google.cloud.vision.v1p4beta1.Feature\x12\x42\n\rimage_context\x18\x03 \x01(\x0b\x32+.google.cloud.vision.v1p4beta1.ImageContext":\n\x16ImageAnnotationContext\x12\x0b\n\x03uri\x18\x01 \x01(\t\x12\x13\n\x0bpage_number\x18\x02 \x01(\x05"\xbf\x08\n\x15\x41nnotateImageResponse\x12G\n\x10\x66\x61\x63\x65_annotations\x18\x01 \x03(\x0b\x32-.google.cloud.vision.v1p4beta1.FaceAnnotation\x12M\n\x14landmark_annotations\x18\x02 \x03(\x0b\x32/.google.cloud.vision.v1p4beta1.EntityAnnotation\x12I\n\x10logo_annotations\x18\x03 \x03(\x0b\x32/.google.cloud.vision.v1p4beta1.EntityAnnotation\x12J\n\x11label_annotations\x18\x04 \x03(\x0b\x32/.google.cloud.vision.v1p4beta1.EntityAnnotation\x12^\n\x1clocalized_object_annotations\x18\x16 \x03(\x0b\x32\x38.google.cloud.vision.v1p4beta1.LocalizedObjectAnnotation\x12I\n\x10text_annotations\x18\x05 \x03(\x0b\x32/.google.cloud.vision.v1p4beta1.EntityAnnotation\x12K\n\x14\x66ull_text_annotation\x18\x0c \x01(\x0b\x32-.google.cloud.vision.v1p4beta1.TextAnnotation\x12S\n\x16safe_search_annotation\x18\x06 \x01(\x0b\x32\x33.google.cloud.vision.v1p4beta1.SafeSearchAnnotation\x12S\n\x1bimage_properties_annotation\x18\x08 
\x01(\x0b\x32..google.cloud.vision.v1p4beta1.ImageProperties\x12Q\n\x15\x63rop_hints_annotation\x18\x0b \x01(\x0b\x32\x32.google.cloud.vision.v1p4beta1.CropHintsAnnotation\x12\x42\n\rweb_detection\x18\r \x01(\x0b\x32+.google.cloud.vision.v1p4beta1.WebDetection\x12S\n\x16product_search_results\x18\x0e \x01(\x0b\x32\x33.google.cloud.vision.v1p4beta1.ProductSearchResults\x12!\n\x05\x65rror\x18\t \x01(\x0b\x32\x12.google.rpc.Status\x12\x46\n\x07\x63ontext\x18\x15 \x01(\x0b\x32\x35.google.cloud.vision.v1p4beta1.ImageAnnotationContext"\xb6\x01\n\x14\x41nnotateFileResponse\x12@\n\x0cinput_config\x18\x01 \x01(\x0b\x32*.google.cloud.vision.v1p4beta1.InputConfig\x12G\n\tresponses\x18\x02 \x03(\x0b\x32\x34.google.cloud.vision.v1p4beta1.AnnotateImageResponse\x12\x13\n\x0btotal_pages\x18\x03 \x01(\x05"c\n\x1a\x42\x61tchAnnotateImagesRequest\x12\x45\n\x08requests\x18\x01 \x03(\x0b\x32\x33.google.cloud.vision.v1p4beta1.AnnotateImageRequest"f\n\x1b\x42\x61tchAnnotateImagesResponse\x12G\n\tresponses\x18\x01 \x03(\x0b\x32\x34.google.cloud.vision.v1p4beta1.AnnotateImageResponse"\xe4\x01\n\x13\x41nnotateFileRequest\x12@\n\x0cinput_config\x18\x01 \x01(\x0b\x32*.google.cloud.vision.v1p4beta1.InputConfig\x12\x38\n\x08\x66\x65\x61tures\x18\x02 \x03(\x0b\x32&.google.cloud.vision.v1p4beta1.Feature\x12\x42\n\rimage_context\x18\x03 \x01(\x0b\x32+.google.cloud.vision.v1p4beta1.ImageContext\x12\r\n\x05pages\x18\x04 \x03(\x05"a\n\x19\x42\x61tchAnnotateFilesRequest\x12\x44\n\x08requests\x18\x01 \x03(\x0b\x32\x32.google.cloud.vision.v1p4beta1.AnnotateFileRequest"d\n\x1a\x42\x61tchAnnotateFilesResponse\x12\x46\n\tresponses\x18\x01 \x03(\x0b\x32\x33.google.cloud.vision.v1p4beta1.AnnotateFileResponse"\x9e\x02\n\x18\x41syncAnnotateFileRequest\x12@\n\x0cinput_config\x18\x01 \x01(\x0b\x32*.google.cloud.vision.v1p4beta1.InputConfig\x12\x38\n\x08\x66\x65\x61tures\x18\x02 \x03(\x0b\x32&.google.cloud.vision.v1p4beta1.Feature\x12\x42\n\rimage_context\x18\x03 
\x01(\x0b\x32+.google.cloud.vision.v1p4beta1.ImageContext\x12\x42\n\routput_config\x18\x04 \x01(\x0b\x32+.google.cloud.vision.v1p4beta1.OutputConfig"_\n\x19\x41syncAnnotateFileResponse\x12\x42\n\routput_config\x18\x01 \x01(\x0b\x32+.google.cloud.vision.v1p4beta1.OutputConfig"\xac\x01\n\x1f\x41syncBatchAnnotateImagesRequest\x12\x45\n\x08requests\x18\x01 \x03(\x0b\x32\x33.google.cloud.vision.v1p4beta1.AnnotateImageRequest\x12\x42\n\routput_config\x18\x02 \x01(\x0b\x32+.google.cloud.vision.v1p4beta1.OutputConfig"f\n AsyncBatchAnnotateImagesResponse\x12\x42\n\routput_config\x18\x01 \x01(\x0b\x32+.google.cloud.vision.v1p4beta1.OutputConfig"k\n\x1e\x41syncBatchAnnotateFilesRequest\x12I\n\x08requests\x18\x01 \x03(\x0b\x32\x37.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequest"n\n\x1f\x41syncBatchAnnotateFilesResponse\x12K\n\tresponses\x18\x01 \x03(\x0b\x32\x38.google.cloud.vision.v1p4beta1.AsyncAnnotateFileResponse"o\n\x0bInputConfig\x12<\n\ngcs_source\x18\x01 \x01(\x0b\x32(.google.cloud.vision.v1p4beta1.GcsSource\x12\x0f\n\x07\x63ontent\x18\x03 \x01(\x0c\x12\x11\n\tmime_type\x18\x02 \x01(\t"j\n\x0cOutputConfig\x12\x46\n\x0fgcs_destination\x18\x01 \x01(\x0b\x32-.google.cloud.vision.v1p4beta1.GcsDestination\x12\x12\n\nbatch_size\x18\x02 \x01(\x05"\x18\n\tGcsSource\x12\x0b\n\x03uri\x18\x01 \x01(\t"\x1d\n\x0eGcsDestination\x12\x0b\n\x03uri\x18\x01 \x01(\t"\x8f\x02\n\x11OperationMetadata\x12\x45\n\x05state\x18\x01 \x01(\x0e\x32\x36.google.cloud.vision.v1p4beta1.OperationMetadata.State\x12/\n\x0b\x63reate_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x06 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp"Q\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07\x43REATED\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\x12\x08\n\x04\x44ONE\x10\x03\x12\r\n\tCANCELLED\x10\x04*e\n\nLikelihood\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x11\n\rVERY_UNLIKELY\x10\x01\x12\x0c\n\x08UNLIKELY\x10\x02\x12\x0c\n\x08POSSIBLE\x10\x03\x12\n\n\x06LIKELY\x10\x04\x12\x0f\n\x0bVERY_LIKELY\x10\x05\x32\xcf\x05\n\x0eImageAnnotator\x12\xb3\x01\n\x13\x42\x61tchAnnotateImages\x12\x39.google.cloud.vision.v1p4beta1.BatchAnnotateImagesRequest\x1a:.google.cloud.vision.v1p4beta1.BatchAnnotateImagesResponse"%\x82\xd3\xe4\x93\x02\x1f"\x1a/v1p4beta1/images:annotate:\x01*\x12\xaf\x01\n\x12\x42\x61tchAnnotateFiles\x12\x38.google.cloud.vision.v1p4beta1.BatchAnnotateFilesRequest\x1a\x39.google.cloud.vision.v1p4beta1.BatchAnnotateFilesResponse"$\x82\xd3\xe4\x93\x02\x1e"\x19/v1p4beta1/files:annotate:\x01*\x12\xaa\x01\n\x18\x41syncBatchAnnotateImages\x12>.google.cloud.vision.v1p4beta1.AsyncBatchAnnotateImagesRequest\x1a\x1d.google.longrunning.Operation"/\x82\xd3\xe4\x93\x02)"$/v1p4beta1/images:asyncBatchAnnotate:\x01*\x12\xa7\x01\n\x17\x41syncBatchAnnotateFiles\x12=.google.cloud.vision.v1p4beta1.AsyncBatchAnnotateFilesRequest\x1a\x1d.google.longrunning.Operation".\x82\xd3\xe4\x93\x02("#/v1p4beta1/files:asyncBatchAnnotate:\x01*B\x89\x01\n!com.google.cloud.vision.v1p4beta1B\x13ImageAnnotatorProtoP\x01ZCgoogle.golang.org/genproto/googleapis/cloud/vision/v1p4beta1;vision\xf8\x01\x01\xa2\x02\x04GCVNb\x06proto3' + 
'\n9google/cloud/vision_v1p4beta1/proto/image_annotator.proto\x12\x1dgoogle.cloud.vision.v1p4beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a.google/cloud/vision_v1p4beta1/proto/face.proto\x1a\x32google/cloud/vision_v1p4beta1/proto/geometry.proto\x1a\x38google/cloud/vision_v1p4beta1/proto/product_search.proto\x1a\x39google/cloud/vision_v1p4beta1/proto/text_annotation.proto\x1a\x37google/cloud/vision_v1p4beta1/proto/web_detection.proto\x1a#google/longrunning/operations.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto\x1a\x17google/type/color.proto\x1a\x18google/type/latlng.proto"\x8e\x03\n\x07\x46\x65\x61ture\x12\x39\n\x04type\x18\x01 \x01(\x0e\x32+.google.cloud.vision.v1p4beta1.Feature.Type\x12\x13\n\x0bmax_results\x18\x02 \x01(\x05\x12\r\n\x05model\x18\x03 \x01(\t"\xa3\x02\n\x04Type\x12\x14\n\x10TYPE_UNSPECIFIED\x10\x00\x12\x12\n\x0e\x46\x41\x43\x45_DETECTION\x10\x01\x12\x16\n\x12LANDMARK_DETECTION\x10\x02\x12\x12\n\x0eLOGO_DETECTION\x10\x03\x12\x13\n\x0fLABEL_DETECTION\x10\x04\x12\x12\n\x0eTEXT_DETECTION\x10\x05\x12\x1b\n\x17\x44OCUMENT_TEXT_DETECTION\x10\x0b\x12\x19\n\x15SAFE_SEARCH_DETECTION\x10\x06\x12\x14\n\x10IMAGE_PROPERTIES\x10\x07\x12\x0e\n\nCROP_HINTS\x10\t\x12\x11\n\rWEB_DETECTION\x10\n\x12\x12\n\x0ePRODUCT_SEARCH\x10\x0c\x12\x17\n\x13OBJECT_LOCALIZATION\x10\x13"7\n\x0bImageSource\x12\x15\n\rgcs_image_uri\x18\x01 \x01(\t\x12\x11\n\timage_uri\x18\x02 \x01(\t"T\n\x05Image\x12\x0f\n\x07\x63ontent\x18\x01 \x01(\x0c\x12:\n\x06source\x18\x02 \x01(\x0b\x32*.google.cloud.vision.v1p4beta1.ImageSource"\xed\x0e\n\x0e\x46\x61\x63\x65\x41nnotation\x12\x42\n\rbounding_poly\x18\x01 \x01(\x0b\x32+.google.cloud.vision.v1p4beta1.BoundingPoly\x12\x45\n\x10\x66\x64_bounding_poly\x18\x02 \x01(\x0b\x32+.google.cloud.vision.v1p4beta1.BoundingPoly\x12I\n\tlandmarks\x18\x03 
\x03(\x0b\x32\x36.google.cloud.vision.v1p4beta1.FaceAnnotation.Landmark\x12\x12\n\nroll_angle\x18\x04 \x01(\x02\x12\x11\n\tpan_angle\x18\x05 \x01(\x02\x12\x12\n\ntilt_angle\x18\x06 \x01(\x02\x12\x1c\n\x14\x64\x65tection_confidence\x18\x07 \x01(\x02\x12\x1e\n\x16landmarking_confidence\x18\x08 \x01(\x02\x12\x41\n\x0ejoy_likelihood\x18\t \x01(\x0e\x32).google.cloud.vision.v1p4beta1.Likelihood\x12\x44\n\x11sorrow_likelihood\x18\n \x01(\x0e\x32).google.cloud.vision.v1p4beta1.Likelihood\x12\x43\n\x10\x61nger_likelihood\x18\x0b \x01(\x0e\x32).google.cloud.vision.v1p4beta1.Likelihood\x12\x46\n\x13surprise_likelihood\x18\x0c \x01(\x0e\x32).google.cloud.vision.v1p4beta1.Likelihood\x12K\n\x18under_exposed_likelihood\x18\r \x01(\x0e\x32).google.cloud.vision.v1p4beta1.Likelihood\x12\x45\n\x12\x62lurred_likelihood\x18\x0e \x01(\x0e\x32).google.cloud.vision.v1p4beta1.Likelihood\x12\x46\n\x13headwear_likelihood\x18\x0f \x01(\x0e\x32).google.cloud.vision.v1p4beta1.Likelihood\x12P\n\x12recognition_result\x18\x10 \x03(\x0b\x32\x34.google.cloud.vision.v1p4beta1.FaceRecognitionResult\x1a\xc7\x07\n\x08Landmark\x12I\n\x04type\x18\x03 \x01(\x0e\x32;.google.cloud.vision.v1p4beta1.FaceAnnotation.Landmark.Type\x12\x39\n\x08position\x18\x04 
\x01(\x0b\x32\'.google.cloud.vision.v1p4beta1.Position"\xb4\x06\n\x04Type\x12\x14\n\x10UNKNOWN_LANDMARK\x10\x00\x12\x0c\n\x08LEFT_EYE\x10\x01\x12\r\n\tRIGHT_EYE\x10\x02\x12\x18\n\x14LEFT_OF_LEFT_EYEBROW\x10\x03\x12\x19\n\x15RIGHT_OF_LEFT_EYEBROW\x10\x04\x12\x19\n\x15LEFT_OF_RIGHT_EYEBROW\x10\x05\x12\x1a\n\x16RIGHT_OF_RIGHT_EYEBROW\x10\x06\x12\x19\n\x15MIDPOINT_BETWEEN_EYES\x10\x07\x12\x0c\n\x08NOSE_TIP\x10\x08\x12\r\n\tUPPER_LIP\x10\t\x12\r\n\tLOWER_LIP\x10\n\x12\x0e\n\nMOUTH_LEFT\x10\x0b\x12\x0f\n\x0bMOUTH_RIGHT\x10\x0c\x12\x10\n\x0cMOUTH_CENTER\x10\r\x12\x15\n\x11NOSE_BOTTOM_RIGHT\x10\x0e\x12\x14\n\x10NOSE_BOTTOM_LEFT\x10\x0f\x12\x16\n\x12NOSE_BOTTOM_CENTER\x10\x10\x12\x19\n\x15LEFT_EYE_TOP_BOUNDARY\x10\x11\x12\x19\n\x15LEFT_EYE_RIGHT_CORNER\x10\x12\x12\x1c\n\x18LEFT_EYE_BOTTOM_BOUNDARY\x10\x13\x12\x18\n\x14LEFT_EYE_LEFT_CORNER\x10\x14\x12\x1a\n\x16RIGHT_EYE_TOP_BOUNDARY\x10\x15\x12\x1a\n\x16RIGHT_EYE_RIGHT_CORNER\x10\x16\x12\x1d\n\x19RIGHT_EYE_BOTTOM_BOUNDARY\x10\x17\x12\x19\n\x15RIGHT_EYE_LEFT_CORNER\x10\x18\x12\x1f\n\x1bLEFT_EYEBROW_UPPER_MIDPOINT\x10\x19\x12 \n\x1cRIGHT_EYEBROW_UPPER_MIDPOINT\x10\x1a\x12\x14\n\x10LEFT_EAR_TRAGION\x10\x1b\x12\x15\n\x11RIGHT_EAR_TRAGION\x10\x1c\x12\x12\n\x0eLEFT_EYE_PUPIL\x10\x1d\x12\x13\n\x0fRIGHT_EYE_PUPIL\x10\x1e\x12\x15\n\x11\x46OREHEAD_GLABELLA\x10\x1f\x12\x11\n\rCHIN_GNATHION\x10 \x12\x14\n\x10\x43HIN_LEFT_GONION\x10!\x12\x15\n\x11\x43HIN_RIGHT_GONION\x10""4\n\x0cLocationInfo\x12$\n\x07lat_lng\x18\x01 \x01(\x0b\x32\x13.google.type.LatLng"=\n\x08Property\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t\x12\x14\n\x0cuint64_value\x18\x03 \x01(\x04"\xc0\x02\n\x10\x45ntityAnnotation\x12\x0b\n\x03mid\x18\x01 \x01(\t\x12\x0e\n\x06locale\x18\x02 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12\r\n\x05score\x18\x04 \x01(\x02\x12\x16\n\nconfidence\x18\x05 \x01(\x02\x42\x02\x18\x01\x12\x12\n\ntopicality\x18\x06 \x01(\x02\x12\x42\n\rbounding_poly\x18\x07 
\x01(\x0b\x32+.google.cloud.vision.v1p4beta1.BoundingPoly\x12>\n\tlocations\x18\x08 \x03(\x0b\x32+.google.cloud.vision.v1p4beta1.LocationInfo\x12;\n\nproperties\x18\t \x03(\x0b\x32\'.google.cloud.vision.v1p4beta1.Property"\xa0\x01\n\x19LocalizedObjectAnnotation\x12\x0b\n\x03mid\x18\x01 \x01(\t\x12\x15\n\rlanguage_code\x18\x02 \x01(\t\x12\x0c\n\x04name\x18\x03 \x01(\t\x12\r\n\x05score\x18\x04 \x01(\x02\x12\x42\n\rbounding_poly\x18\x05 \x01(\x0b\x32+.google.cloud.vision.v1p4beta1.BoundingPoly"\xbc\x02\n\x14SafeSearchAnnotation\x12\x38\n\x05\x61\x64ult\x18\x01 \x01(\x0e\x32).google.cloud.vision.v1p4beta1.Likelihood\x12\x38\n\x05spoof\x18\x02 \x01(\x0e\x32).google.cloud.vision.v1p4beta1.Likelihood\x12:\n\x07medical\x18\x03 \x01(\x0e\x32).google.cloud.vision.v1p4beta1.Likelihood\x12;\n\x08violence\x18\x04 \x01(\x0e\x32).google.cloud.vision.v1p4beta1.Likelihood\x12\x37\n\x04racy\x18\t \x01(\x0e\x32).google.cloud.vision.v1p4beta1.Likelihood"a\n\x0bLatLongRect\x12(\n\x0bmin_lat_lng\x18\x01 \x01(\x0b\x32\x13.google.type.LatLng\x12(\n\x0bmax_lat_lng\x18\x02 \x01(\x0b\x32\x13.google.type.LatLng"U\n\tColorInfo\x12!\n\x05\x63olor\x18\x01 \x01(\x0b\x32\x12.google.type.Color\x12\r\n\x05score\x18\x02 \x01(\x02\x12\x16\n\x0epixel_fraction\x18\x03 \x01(\x02"T\n\x18\x44ominantColorsAnnotation\x12\x38\n\x06\x63olors\x18\x01 \x03(\x0b\x32(.google.cloud.vision.v1p4beta1.ColorInfo"c\n\x0fImageProperties\x12P\n\x0f\x64ominant_colors\x18\x01 \x01(\x0b\x32\x37.google.cloud.vision.v1p4beta1.DominantColorsAnnotation"\x7f\n\x08\x43ropHint\x12\x42\n\rbounding_poly\x18\x01 \x01(\x0b\x32+.google.cloud.vision.v1p4beta1.BoundingPoly\x12\x12\n\nconfidence\x18\x02 \x01(\x02\x12\x1b\n\x13importance_fraction\x18\x03 \x01(\x02"R\n\x13\x43ropHintsAnnotation\x12;\n\ncrop_hints\x18\x01 \x03(\x0b\x32\'.google.cloud.vision.v1p4beta1.CropHint"(\n\x0f\x43ropHintsParams\x12\x15\n\raspect_ratios\x18\x01 \x03(\x02"1\n\x12WebDetectionParams\x12\x1b\n\x13include_geo_results\x18\x02 
\x01(\x08"\xaf\x03\n\x0cImageContext\x12\x41\n\rlat_long_rect\x18\x01 \x01(\x0b\x32*.google.cloud.vision.v1p4beta1.LatLongRect\x12\x16\n\x0elanguage_hints\x18\x02 \x03(\t\x12I\n\x11\x63rop_hints_params\x18\x04 \x01(\x0b\x32..google.cloud.vision.v1p4beta1.CropHintsParams\x12U\n\x17\x66\x61\x63\x65_recognition_params\x18\n \x01(\x0b\x32\x34.google.cloud.vision.v1p4beta1.FaceRecognitionParams\x12Q\n\x15product_search_params\x18\x05 \x01(\x0b\x32\x32.google.cloud.vision.v1p4beta1.ProductSearchParams\x12O\n\x14web_detection_params\x18\x06 \x01(\x0b\x32\x31.google.cloud.vision.v1p4beta1.WebDetectionParams"\xc9\x01\n\x14\x41nnotateImageRequest\x12\x33\n\x05image\x18\x01 \x01(\x0b\x32$.google.cloud.vision.v1p4beta1.Image\x12\x38\n\x08\x66\x65\x61tures\x18\x02 \x03(\x0b\x32&.google.cloud.vision.v1p4beta1.Feature\x12\x42\n\rimage_context\x18\x03 \x01(\x0b\x32+.google.cloud.vision.v1p4beta1.ImageContext":\n\x16ImageAnnotationContext\x12\x0b\n\x03uri\x18\x01 \x01(\t\x12\x13\n\x0bpage_number\x18\x02 \x01(\x05"\xbf\x08\n\x15\x41nnotateImageResponse\x12G\n\x10\x66\x61\x63\x65_annotations\x18\x01 \x03(\x0b\x32-.google.cloud.vision.v1p4beta1.FaceAnnotation\x12M\n\x14landmark_annotations\x18\x02 \x03(\x0b\x32/.google.cloud.vision.v1p4beta1.EntityAnnotation\x12I\n\x10logo_annotations\x18\x03 \x03(\x0b\x32/.google.cloud.vision.v1p4beta1.EntityAnnotation\x12J\n\x11label_annotations\x18\x04 \x03(\x0b\x32/.google.cloud.vision.v1p4beta1.EntityAnnotation\x12^\n\x1clocalized_object_annotations\x18\x16 \x03(\x0b\x32\x38.google.cloud.vision.v1p4beta1.LocalizedObjectAnnotation\x12I\n\x10text_annotations\x18\x05 \x03(\x0b\x32/.google.cloud.vision.v1p4beta1.EntityAnnotation\x12K\n\x14\x66ull_text_annotation\x18\x0c \x01(\x0b\x32-.google.cloud.vision.v1p4beta1.TextAnnotation\x12S\n\x16safe_search_annotation\x18\x06 \x01(\x0b\x32\x33.google.cloud.vision.v1p4beta1.SafeSearchAnnotation\x12S\n\x1bimage_properties_annotation\x18\x08 
\x01(\x0b\x32..google.cloud.vision.v1p4beta1.ImageProperties\x12Q\n\x15\x63rop_hints_annotation\x18\x0b \x01(\x0b\x32\x32.google.cloud.vision.v1p4beta1.CropHintsAnnotation\x12\x42\n\rweb_detection\x18\r \x01(\x0b\x32+.google.cloud.vision.v1p4beta1.WebDetection\x12S\n\x16product_search_results\x18\x0e \x01(\x0b\x32\x33.google.cloud.vision.v1p4beta1.ProductSearchResults\x12!\n\x05\x65rror\x18\t \x01(\x0b\x32\x12.google.rpc.Status\x12\x46\n\x07\x63ontext\x18\x15 \x01(\x0b\x32\x35.google.cloud.vision.v1p4beta1.ImageAnnotationContext"c\n\x1a\x42\x61tchAnnotateImagesRequest\x12\x45\n\x08requests\x18\x01 \x03(\x0b\x32\x33.google.cloud.vision.v1p4beta1.AnnotateImageRequest"f\n\x1b\x42\x61tchAnnotateImagesResponse\x12G\n\tresponses\x18\x01 \x03(\x0b\x32\x34.google.cloud.vision.v1p4beta1.AnnotateImageResponse"\xe4\x01\n\x13\x41nnotateFileRequest\x12@\n\x0cinput_config\x18\x01 \x01(\x0b\x32*.google.cloud.vision.v1p4beta1.InputConfig\x12\x38\n\x08\x66\x65\x61tures\x18\x02 \x03(\x0b\x32&.google.cloud.vision.v1p4beta1.Feature\x12\x42\n\rimage_context\x18\x03 \x01(\x0b\x32+.google.cloud.vision.v1p4beta1.ImageContext\x12\r\n\x05pages\x18\x04 \x03(\x05"\xd9\x01\n\x14\x41nnotateFileResponse\x12@\n\x0cinput_config\x18\x01 \x01(\x0b\x32*.google.cloud.vision.v1p4beta1.InputConfig\x12G\n\tresponses\x18\x02 \x03(\x0b\x32\x34.google.cloud.vision.v1p4beta1.AnnotateImageResponse\x12\x13\n\x0btotal_pages\x18\x03 \x01(\x05\x12!\n\x05\x65rror\x18\x04 \x01(\x0b\x32\x12.google.rpc.Status"f\n\x19\x42\x61tchAnnotateFilesRequest\x12I\n\x08requests\x18\x01 \x03(\x0b\x32\x32.google.cloud.vision.v1p4beta1.AnnotateFileRequestB\x03\xe0\x41\x02"d\n\x1a\x42\x61tchAnnotateFilesResponse\x12\x46\n\tresponses\x18\x01 \x03(\x0b\x32\x33.google.cloud.vision.v1p4beta1.AnnotateFileResponse"\x9e\x02\n\x18\x41syncAnnotateFileRequest\x12@\n\x0cinput_config\x18\x01 \x01(\x0b\x32*.google.cloud.vision.v1p4beta1.InputConfig\x12\x38\n\x08\x66\x65\x61tures\x18\x02 
\x03(\x0b\x32&.google.cloud.vision.v1p4beta1.Feature\x12\x42\n\rimage_context\x18\x03 \x01(\x0b\x32+.google.cloud.vision.v1p4beta1.ImageContext\x12\x42\n\routput_config\x18\x04 \x01(\x0b\x32+.google.cloud.vision.v1p4beta1.OutputConfig"_\n\x19\x41syncAnnotateFileResponse\x12\x42\n\routput_config\x18\x01 \x01(\x0b\x32+.google.cloud.vision.v1p4beta1.OutputConfig"\xb6\x01\n\x1f\x41syncBatchAnnotateImagesRequest\x12J\n\x08requests\x18\x01 \x03(\x0b\x32\x33.google.cloud.vision.v1p4beta1.AnnotateImageRequestB\x03\xe0\x41\x02\x12G\n\routput_config\x18\x02 \x01(\x0b\x32+.google.cloud.vision.v1p4beta1.OutputConfigB\x03\xe0\x41\x02"f\n AsyncBatchAnnotateImagesResponse\x12\x42\n\routput_config\x18\x01 \x01(\x0b\x32+.google.cloud.vision.v1p4beta1.OutputConfig"p\n\x1e\x41syncBatchAnnotateFilesRequest\x12N\n\x08requests\x18\x01 \x03(\x0b\x32\x37.google.cloud.vision.v1p4beta1.AsyncAnnotateFileRequestB\x03\xe0\x41\x02"n\n\x1f\x41syncBatchAnnotateFilesResponse\x12K\n\tresponses\x18\x01 \x03(\x0b\x32\x38.google.cloud.vision.v1p4beta1.AsyncAnnotateFileResponse"o\n\x0bInputConfig\x12<\n\ngcs_source\x18\x01 \x01(\x0b\x32(.google.cloud.vision.v1p4beta1.GcsSource\x12\x0f\n\x07\x63ontent\x18\x03 \x01(\x0c\x12\x11\n\tmime_type\x18\x02 \x01(\t"j\n\x0cOutputConfig\x12\x46\n\x0fgcs_destination\x18\x01 \x01(\x0b\x32-.google.cloud.vision.v1p4beta1.GcsDestination\x12\x12\n\nbatch_size\x18\x02 \x01(\x05"\x18\n\tGcsSource\x12\x0b\n\x03uri\x18\x01 \x01(\t"\x1d\n\x0eGcsDestination\x12\x0b\n\x03uri\x18\x01 \x01(\t"\x8f\x02\n\x11OperationMetadata\x12\x45\n\x05state\x18\x01 \x01(\x0e\x32\x36.google.cloud.vision.v1p4beta1.OperationMetadata.State\x12/\n\x0b\x63reate_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x06 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp"Q\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07\x43REATED\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\x12\x08\n\x04\x44ONE\x10\x03\x12\r\n\tCANCELLED\x10\x04*e\n\nLikelihood\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x11\n\rVERY_UNLIKELY\x10\x01\x12\x0c\n\x08UNLIKELY\x10\x02\x12\x0c\n\x08POSSIBLE\x10\x03\x12\n\n\x06LIKELY\x10\x04\x12\x0f\n\x0bVERY_LIKELY\x10\x05\x32\xf1\x07\n\x0eImageAnnotator\x12\xbe\x01\n\x13\x42\x61tchAnnotateImages\x12\x39.google.cloud.vision.v1p4beta1.BatchAnnotateImagesRequest\x1a:.google.cloud.vision.v1p4beta1.BatchAnnotateImagesResponse"0\x82\xd3\xe4\x93\x02\x1f"\x1a/v1p4beta1/images:annotate:\x01*\xda\x41\x08requests\x12\xba\x01\n\x12\x42\x61tchAnnotateFiles\x12\x38.google.cloud.vision.v1p4beta1.BatchAnnotateFilesRequest\x1a\x39.google.cloud.vision.v1p4beta1.BatchAnnotateFilesResponse"/\x82\xd3\xe4\x93\x02\x1e"\x19/v1p4beta1/files:annotate:\x01*\xda\x41\x08requests\x12\xfc\x01\n\x18\x41syncBatchAnnotateImages\x12>.google.cloud.vision.v1p4beta1.AsyncBatchAnnotateImagesRequest\x1a\x1d.google.longrunning.Operation"\x80\x01\x82\xd3\xe4\x93\x02)"$/v1p4beta1/images:asyncBatchAnnotate:\x01*\xda\x41\x16requests,output_config\xca\x41\x35\n AsyncBatchAnnotateImagesResponse\x12\x11OperationMetadata\x12\xe9\x01\n\x17\x41syncBatchAnnotateFiles\x12=.google.cloud.vision.v1p4beta1.AsyncBatchAnnotateFilesRequest\x1a\x1d.google.longrunning.Operation"p\x82\xd3\xe4\x93\x02("#/v1p4beta1/files:asyncBatchAnnotate:\x01*\xda\x41\x08requests\xca\x41\x34\n\x1f\x41syncBatchAnnotateFilesResponse\x12\x11OperationMetadata\x1av\xca\x41\x15vision.googleapis.com\xd2\x41[https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-visionB\x89\x01\n!com.google.cloud.vision.v1p4beta1B\x13ImageAnnotatorProtoP\x01ZCgoogle.golang.org/genproto/googleapis/cloud/vision/v1p4beta1;vision\xf8\x01\x01\xa2\x02\x04GCVNb\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, + 
google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, + google_dot_cloud_dot_vision__v1p4beta1_dot_proto_dot_face__pb2.DESCRIPTOR, google_dot_cloud_dot_vision__v1p4beta1_dot_proto_dot_geometry__pb2.DESCRIPTOR, google_dot_cloud_dot_vision__v1p4beta1_dot_proto_dot_product__search__pb2.DESCRIPTOR, google_dot_cloud_dot_vision__v1p4beta1_dot_proto_dot_text__annotation__pb2.DESCRIPTOR, @@ -91,8 +99,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=8456, - serialized_end=8557, + serialized_start=8786, + serialized_end=8887, ) _sym_db.RegisterEnumDescriptor(_LIKELIHOOD) @@ -189,8 +197,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=636, - serialized_end=927, + serialized_start=742, + serialized_end=1033, ) _sym_db.RegisterEnumDescriptor(_FEATURE_TYPE) @@ -416,8 +424,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=2072, - serialized_end=2892, + serialized_start=2260, + serialized_end=3080, ) _sym_db.RegisterEnumDescriptor(_FACEANNOTATION_LANDMARK_TYPE) @@ -449,8 +457,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=8373, - serialized_end=8454, + serialized_start=8703, + serialized_end=8784, ) _sym_db.RegisterEnumDescriptor(_OPERATIONMETADATA_STATE) @@ -525,8 +533,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=529, - serialized_end=927, + serialized_start=635, + serialized_end=1033, ) @@ -582,8 +590,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=929, - serialized_end=984, + serialized_start=1035, + serialized_end=1090, ) @@ -639,8 +647,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=986, - serialized_end=1070, + serialized_start=1092, + serialized_end=1176, ) @@ -696,8 +704,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1925, - serialized_end=2892, + serialized_start=2113, + serialized_end=3080, ) _FACEANNOTATION = 
_descriptor.Descriptor( @@ -977,6 +985,24 @@ serialized_options=None, file=DESCRIPTOR, ), + _descriptor.FieldDescriptor( + name="recognition_result", + full_name="google.cloud.vision.v1p4beta1.FaceAnnotation.recognition_result", + index=15, + number=16, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), ], extensions=[], nested_types=[_FACEANNOTATION_LANDMARK], @@ -986,8 +1012,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1073, - serialized_end=2892, + serialized_start=1179, + serialized_end=3080, ) @@ -1025,8 +1051,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2894, - serialized_end=2946, + serialized_start=3082, + serialized_end=3134, ) @@ -1100,8 +1126,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2948, - serialized_end=3009, + serialized_start=3136, + serialized_end=3197, ) @@ -1283,8 +1309,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3012, - serialized_end=3332, + serialized_start=3200, + serialized_end=3520, ) @@ -1394,8 +1420,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3335, - serialized_end=3495, + serialized_start=3523, + serialized_end=3683, ) @@ -1505,8 +1531,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3498, - serialized_end=3814, + serialized_start=3686, + serialized_end=4002, ) @@ -1562,8 +1588,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3816, - serialized_end=3913, + serialized_start=4004, + serialized_end=4101, ) @@ -1637,8 +1663,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3915, - serialized_end=4000, + serialized_start=4103, + serialized_end=4188, ) @@ -1676,8 +1702,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - 
serialized_start=4002, - serialized_end=4086, + serialized_start=4190, + serialized_end=4274, ) @@ -1715,8 +1741,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4088, - serialized_end=4187, + serialized_start=4276, + serialized_end=4375, ) @@ -1790,8 +1816,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4189, - serialized_end=4316, + serialized_start=4377, + serialized_end=4504, ) @@ -1829,8 +1855,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4318, - serialized_end=4400, + serialized_start=4506, + serialized_end=4588, ) @@ -1868,8 +1894,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4402, - serialized_end=4442, + serialized_start=4590, + serialized_end=4630, ) @@ -1907,8 +1933,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4444, - serialized_end=4493, + serialized_start=4632, + serialized_end=4681, ) @@ -1973,10 +1999,28 @@ serialized_options=None, file=DESCRIPTOR, ), + _descriptor.FieldDescriptor( + name="face_recognition_params", + full_name="google.cloud.vision.v1p4beta1.ImageContext.face_recognition_params", + index=3, + number=10, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), _descriptor.FieldDescriptor( name="product_search_params", full_name="google.cloud.vision.v1p4beta1.ImageContext.product_search_params", - index=3, + index=4, number=5, type=11, cpp_type=10, @@ -1994,7 +2038,7 @@ _descriptor.FieldDescriptor( name="web_detection_params", full_name="google.cloud.vision.v1p4beta1.ImageContext.web_detection_params", - index=4, + index=5, number=6, type=11, cpp_type=10, @@ -2018,8 +2062,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4496, - serialized_end=4840, + serialized_start=4684, + serialized_end=5115, ) 
@@ -2093,8 +2137,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4843, - serialized_end=5044, + serialized_start=5118, + serialized_end=5319, ) @@ -2150,8 +2194,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5046, - serialized_end=5104, + serialized_start=5321, + serialized_end=5379, ) @@ -2423,43 +2467,25 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5107, - serialized_end=6194, + serialized_start=5382, + serialized_end=6469, ) -_ANNOTATEFILERESPONSE = _descriptor.Descriptor( - name="AnnotateFileResponse", - full_name="google.cloud.vision.v1p4beta1.AnnotateFileResponse", +_BATCHANNOTATEIMAGESREQUEST = _descriptor.Descriptor( + name="BatchAnnotateImagesRequest", + full_name="google.cloud.vision.v1p4beta1.BatchAnnotateImagesRequest", filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( - name="input_config", - full_name="google.cloud.vision.v1p4beta1.AnnotateFileResponse.input_config", + name="requests", + full_name="google.cloud.vision.v1p4beta1.BatchAnnotateImagesRequest.requests", index=0, number=1, type=11, cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="responses", - full_name="google.cloud.vision.v1p4beta1.AnnotateFileResponse.responses", - index=1, - number=2, - type=11, - cpp_type=10, label=3, has_default_value=False, default_value=[], @@ -2470,25 +2496,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="total_pages", - full_name="google.cloud.vision.v1p4beta1.AnnotateFileResponse.total_pages", - index=2, - number=3, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - 
is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), + ) ], extensions=[], nested_types=[], @@ -2498,21 +2506,21 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=6197, - serialized_end=6379, + serialized_start=6471, + serialized_end=6570, ) -_BATCHANNOTATEIMAGESREQUEST = _descriptor.Descriptor( - name="BatchAnnotateImagesRequest", - full_name="google.cloud.vision.v1p4beta1.BatchAnnotateImagesRequest", +_BATCHANNOTATEIMAGESRESPONSE = _descriptor.Descriptor( + name="BatchAnnotateImagesResponse", + full_name="google.cloud.vision.v1p4beta1.BatchAnnotateImagesResponse", filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( - name="requests", - full_name="google.cloud.vision.v1p4beta1.BatchAnnotateImagesRequest.requests", + name="responses", + full_name="google.cloud.vision.v1p4beta1.BatchAnnotateImagesResponse.responses", index=0, number=1, type=11, @@ -2537,25 +2545,43 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=6381, - serialized_end=6480, + serialized_start=6572, + serialized_end=6674, ) -_BATCHANNOTATEIMAGESRESPONSE = _descriptor.Descriptor( - name="BatchAnnotateImagesResponse", - full_name="google.cloud.vision.v1p4beta1.BatchAnnotateImagesResponse", +_ANNOTATEFILEREQUEST = _descriptor.Descriptor( + name="AnnotateFileRequest", + full_name="google.cloud.vision.v1p4beta1.AnnotateFileRequest", filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( - name="responses", - full_name="google.cloud.vision.v1p4beta1.BatchAnnotateImagesResponse.responses", + name="input_config", + full_name="google.cloud.vision.v1p4beta1.AnnotateFileRequest.input_config", index=0, number=1, type=11, cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + 
_descriptor.FieldDescriptor( + name="features", + full_name="google.cloud.vision.v1p4beta1.AnnotateFileRequest.features", + index=1, + number=2, + type=11, + cpp_type=10, label=3, has_default_value=False, default_value=[], @@ -2566,7 +2592,43 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), + _descriptor.FieldDescriptor( + name="image_context", + full_name="google.cloud.vision.v1p4beta1.AnnotateFileRequest.image_context", + index=2, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="pages", + full_name="google.cloud.vision.v1p4beta1.AnnotateFileRequest.pages", + index=3, + number=4, + type=5, + cpp_type=1, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), ], extensions=[], nested_types=[], @@ -2576,21 +2638,21 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=6482, - serialized_end=6584, + serialized_start=6677, + serialized_end=6905, ) -_ANNOTATEFILEREQUEST = _descriptor.Descriptor( - name="AnnotateFileRequest", - full_name="google.cloud.vision.v1p4beta1.AnnotateFileRequest", +_ANNOTATEFILERESPONSE = _descriptor.Descriptor( + name="AnnotateFileResponse", + full_name="google.cloud.vision.v1p4beta1.AnnotateFileResponse", filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name="input_config", - full_name="google.cloud.vision.v1p4beta1.AnnotateFileRequest.input_config", + full_name="google.cloud.vision.v1p4beta1.AnnotateFileResponse.input_config", index=0, number=1, type=11, @@ -2607,8 +2669,8 @@ file=DESCRIPTOR, ), _descriptor.FieldDescriptor( - name="features", - 
full_name="google.cloud.vision.v1p4beta1.AnnotateFileRequest.features", + name="responses", + full_name="google.cloud.vision.v1p4beta1.AnnotateFileResponse.responses", index=1, number=2, type=11, @@ -2625,15 +2687,15 @@ file=DESCRIPTOR, ), _descriptor.FieldDescriptor( - name="image_context", - full_name="google.cloud.vision.v1p4beta1.AnnotateFileRequest.image_context", + name="total_pages", + full_name="google.cloud.vision.v1p4beta1.AnnotateFileResponse.total_pages", index=2, number=3, - type=11, - cpp_type=10, + type=5, + cpp_type=1, label=1, has_default_value=False, - default_value=None, + default_value=0, message_type=None, enum_type=None, containing_type=None, @@ -2643,15 +2705,15 @@ file=DESCRIPTOR, ), _descriptor.FieldDescriptor( - name="pages", - full_name="google.cloud.vision.v1p4beta1.AnnotateFileRequest.pages", + name="error", + full_name="google.cloud.vision.v1p4beta1.AnnotateFileResponse.error", index=3, number=4, - type=5, - cpp_type=1, - label=3, + type=11, + cpp_type=10, + label=1, has_default_value=False, - default_value=[], + default_value=None, message_type=None, enum_type=None, containing_type=None, @@ -2669,8 +2731,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=6587, - serialized_end=6815, + serialized_start=6908, + serialized_end=7125, ) @@ -2696,7 +2758,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ) ], @@ -2708,8 +2770,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=6817, - serialized_end=6914, + serialized_start=7127, + serialized_end=7229, ) @@ -2747,8 +2809,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=6916, - serialized_end=7016, + serialized_start=7231, + serialized_end=7331, ) @@ -2840,8 +2902,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=7019, - serialized_end=7305, + serialized_start=7334, + serialized_end=7620, ) @@ -2879,8 
+2941,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=7307, - serialized_end=7402, + serialized_start=7622, + serialized_end=7717, ) @@ -2906,7 +2968,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2924,7 +2986,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -2936,8 +2998,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=7405, - serialized_end=7577, + serialized_start=7720, + serialized_end=7902, ) @@ -2975,8 +3037,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=7579, - serialized_end=7681, + serialized_start=7904, + serialized_end=8006, ) @@ -3002,7 +3064,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ) ], @@ -3014,8 +3076,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=7683, - serialized_end=7790, + serialized_start=8008, + serialized_end=8120, ) @@ -3053,8 +3115,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=7792, - serialized_end=7902, + serialized_start=8122, + serialized_end=8232, ) @@ -3128,8 +3190,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=7904, - serialized_end=8015, + serialized_start=8234, + serialized_end=8345, ) @@ -3185,8 +3247,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=8017, - serialized_end=8123, + serialized_start=8347, + serialized_end=8453, ) @@ -3224,8 +3286,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=8125, - serialized_end=8149, + serialized_start=8455, + serialized_end=8479, ) @@ -3263,8 +3325,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=8151, - 
serialized_end=8180, + serialized_start=8481, + serialized_end=8510, ) @@ -3338,8 +3400,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=8183, - serialized_end=8454, + serialized_start=8513, + serialized_end=8784, ) _FEATURE.fields_by_name["type"].enum_type = _FEATURE_TYPE @@ -3373,6 +3435,11 @@ _FACEANNOTATION.fields_by_name["under_exposed_likelihood"].enum_type = _LIKELIHOOD _FACEANNOTATION.fields_by_name["blurred_likelihood"].enum_type = _LIKELIHOOD _FACEANNOTATION.fields_by_name["headwear_likelihood"].enum_type = _LIKELIHOOD +_FACEANNOTATION.fields_by_name[ + "recognition_result" +].message_type = ( + google_dot_cloud_dot_vision__v1p4beta1_dot_proto_dot_face__pb2._FACERECOGNITIONRESULT +) _LOCATIONINFO.fields_by_name[ "lat_lng" ].message_type = google_dot_type_dot_latlng__pb2._LATLNG @@ -3412,6 +3479,11 @@ _CROPHINTSANNOTATION.fields_by_name["crop_hints"].message_type = _CROPHINT _IMAGECONTEXT.fields_by_name["lat_long_rect"].message_type = _LATLONGRECT _IMAGECONTEXT.fields_by_name["crop_hints_params"].message_type = _CROPHINTSPARAMS +_IMAGECONTEXT.fields_by_name[ + "face_recognition_params" +].message_type = ( + google_dot_cloud_dot_vision__v1p4beta1_dot_proto_dot_face__pb2._FACERECOGNITIONPARAMS +) _IMAGECONTEXT.fields_by_name[ "product_search_params" ].message_type = ( @@ -3465,8 +3537,6 @@ "error" ].message_type = google_dot_rpc_dot_status__pb2._STATUS _ANNOTATEIMAGERESPONSE.fields_by_name["context"].message_type = _IMAGEANNOTATIONCONTEXT -_ANNOTATEFILERESPONSE.fields_by_name["input_config"].message_type = _INPUTCONFIG -_ANNOTATEFILERESPONSE.fields_by_name["responses"].message_type = _ANNOTATEIMAGERESPONSE _BATCHANNOTATEIMAGESREQUEST.fields_by_name[ "requests" ].message_type = _ANNOTATEIMAGEREQUEST @@ -3476,6 +3546,11 @@ _ANNOTATEFILEREQUEST.fields_by_name["input_config"].message_type = _INPUTCONFIG _ANNOTATEFILEREQUEST.fields_by_name["features"].message_type = _FEATURE _ANNOTATEFILEREQUEST.fields_by_name["image_context"].message_type 
= _IMAGECONTEXT +_ANNOTATEFILERESPONSE.fields_by_name["input_config"].message_type = _INPUTCONFIG +_ANNOTATEFILERESPONSE.fields_by_name["responses"].message_type = _ANNOTATEIMAGERESPONSE +_ANNOTATEFILERESPONSE.fields_by_name[ + "error" +].message_type = google_dot_rpc_dot_status__pb2._STATUS _BATCHANNOTATEFILESREQUEST.fields_by_name[ "requests" ].message_type = _ANNOTATEFILEREQUEST @@ -3535,7 +3610,6 @@ DESCRIPTOR.message_types_by_name["AnnotateImageRequest"] = _ANNOTATEIMAGEREQUEST DESCRIPTOR.message_types_by_name["ImageAnnotationContext"] = _IMAGEANNOTATIONCONTEXT DESCRIPTOR.message_types_by_name["AnnotateImageResponse"] = _ANNOTATEIMAGERESPONSE -DESCRIPTOR.message_types_by_name["AnnotateFileResponse"] = _ANNOTATEFILERESPONSE DESCRIPTOR.message_types_by_name[ "BatchAnnotateImagesRequest" ] = _BATCHANNOTATEIMAGESREQUEST @@ -3543,6 +3617,7 @@ "BatchAnnotateImagesResponse" ] = _BATCHANNOTATEIMAGESRESPONSE DESCRIPTOR.message_types_by_name["AnnotateFileRequest"] = _ANNOTATEFILEREQUEST +DESCRIPTOR.message_types_by_name["AnnotateFileResponse"] = _ANNOTATEFILERESPONSE DESCRIPTOR.message_types_by_name[ "BatchAnnotateFilesRequest" ] = _BATCHANNOTATEFILESREQUEST @@ -3737,6 +3812,14 @@ Blurred likelihood. headwear_likelihood: Headwear likelihood. + recognition_result: + Additional recognition information. Only computed if + image\_context.face\_recognition\_params is provided, **and** + a match is found to a + [Celebrity][google.cloud.vision.v1p4beta1.Celebrity] in the + input + [CelebritySet][google.cloud.vision.v1p4beta1.CelebritySet]. + This field is sorted in order of decreasing confidence values. """, # @@protoc_insertion_point(class_scope:google.cloud.vision.v1p4beta1.FaceAnnotation) ), @@ -4094,6 +4177,8 @@ `__. crop_hints_params: Parameters for crop hints annotation request. + face_recognition_params: + Parameters for face recognition. product_search_params: Parameters for product search. 
web_detection_params: @@ -4201,30 +4286,6 @@ ) _sym_db.RegisterMessage(AnnotateImageResponse) -AnnotateFileResponse = _reflection.GeneratedProtocolMessageType( - "AnnotateFileResponse", - (_message.Message,), - dict( - DESCRIPTOR=_ANNOTATEFILERESPONSE, - __module__="google.cloud.vision_v1p4beta1.proto.image_annotator_pb2", - __doc__="""Response to a single file annotation request. A file may contain one or - more images, which individually have their own responses. - - - Attributes: - input_config: - Information about the file for which this response is - generated. - responses: - Individual responses to images found within the file. - total_pages: - This field gives the total number of pages in the file. - """, - # @@protoc_insertion_point(class_scope:google.cloud.vision.v1p4beta1.AnnotateFileResponse) - ), -) -_sym_db.RegisterMessage(AnnotateFileResponse) - BatchAnnotateImagesRequest = _reflection.GeneratedProtocolMessageType( "BatchAnnotateImagesRequest", (_message.Message,), @@ -4296,6 +4357,34 @@ ) _sym_db.RegisterMessage(AnnotateFileRequest) +AnnotateFileResponse = _reflection.GeneratedProtocolMessageType( + "AnnotateFileResponse", + (_message.Message,), + dict( + DESCRIPTOR=_ANNOTATEFILERESPONSE, + __module__="google.cloud.vision_v1p4beta1.proto.image_annotator_pb2", + __doc__="""Response to a single file annotation request. A file may contain one or + more images, which individually have their own responses. + + + Attributes: + input_config: + Information about the file for which this response is + generated. + responses: + Individual responses to images found within the file. This + field will be empty if the ``error`` field is set. + total_pages: + This field gives the total number of pages in the file. + error: + If set, represents the error message for the failed request. + The ``responses`` field will not be set in this case. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.vision.v1p4beta1.AnnotateFileResponse) + ), +) +_sym_db.RegisterMessage(AnnotateFileResponse) + BatchAnnotateFilesRequest = _reflection.GeneratedProtocolMessageType( "BatchAnnotateFilesRequest", (_message.Message,), @@ -4307,8 +4396,9 @@ Attributes: requests: - The list of file annotation requests. Right now we support - only one AnnotateFileRequest in BatchAnnotateFilesRequest. + Required. The list of file annotation requests. Right now we + support only one AnnotateFileRequest in + BatchAnnotateFilesRequest. """, # @@protoc_insertion_point(class_scope:google.cloud.vision.v1p4beta1.BatchAnnotateFilesRequest) ), @@ -4391,7 +4481,7 @@ Attributes: requests: - Individual image annotation requests for this batch. + Required. Individual image annotation requests for this batch. output_config: Required. The desired output location and metadata (e.g. format). @@ -4432,7 +4522,8 @@ Attributes: requests: - Individual async file annotation requests for this batch. + Required. Individual async file annotation requests for this + batch. """, # @@protoc_insertion_point(class_scope:google.cloud.vision.v1p4beta1.AsyncBatchAnnotateFilesRequest) ), @@ -4478,8 +4569,9 @@ requests. It does not work for AsyncBatchAnnotateFiles requests. mime_type: - The type of the file. Currently only "application/pdf" and - "image/tiff" are supported. Wildcards are not supported. + The type of the file. Currently only "application/pdf", + "image/tiff" and "image/gif" are supported. Wildcards are not + supported. """, # @@protoc_insertion_point(class_scope:google.cloud.vision.v1p4beta1.InputConfig) ), @@ -4545,17 +4637,25 @@ Attributes: uri: - Google Cloud Storage URI where the results will be stored. - Results will be in JSON format and preceded by its - corresponding input URI. This field can either represent a - single file, or a prefix for multiple outputs. Prefixes must - end in a ``/``. 
Examples: - File: gs://bucket- - name/filename.json - Prefix: gs://bucket-name/prefix/here/ - - File: gs://bucket-name/prefix/here If multiple outputs, each - response is still AnnotateFileResponse, each of which contains - some subset of the full list of AnnotateImageResponse. - Multiple outputs can happen if, for example, the output JSON - is too large and overflows into multiple sharded files. + Google Cloud Storage URI prefix where the results will be + stored. Results will be in JSON format and preceded by its + corresponding input URI prefix. This field can either + represent a gcs file prefix or gcs directory. In either case, + the uri should be unique because in order to get all of the + output files, you will need to do a wildcard gcs search on the + uri prefix you provide. Examples: - File Prefix: + gs://bucket-name/here/filenameprefix The output files will + be created in gs://bucket-name/here/ and the names of the + output files will begin with "filenameprefix". - + Directory Prefix: gs://bucket-name/some/location/ The output + files will be created in gs://bucket-name/some/location/ + and the names of the output files could be anything because + there was no filename prefix specified. If multiple + outputs, each response is still AnnotateFileResponse, each of + which contains some subset of the full list of + AnnotateImageResponse. Multiple outputs can happen if, for + example, the output JSON is too large and overflows into + multiple sharded files. 
""", # @@protoc_insertion_point(class_scope:google.cloud.vision.v1p4beta1.GcsDestination) ), @@ -4587,15 +4687,21 @@ DESCRIPTOR._options = None _ENTITYANNOTATION.fields_by_name["confidence"]._options = None +_BATCHANNOTATEFILESREQUEST.fields_by_name["requests"]._options = None +_ASYNCBATCHANNOTATEIMAGESREQUEST.fields_by_name["requests"]._options = None +_ASYNCBATCHANNOTATEIMAGESREQUEST.fields_by_name["output_config"]._options = None +_ASYNCBATCHANNOTATEFILESREQUEST.fields_by_name["requests"]._options = None _IMAGEANNOTATOR = _descriptor.ServiceDescriptor( name="ImageAnnotator", full_name="google.cloud.vision.v1p4beta1.ImageAnnotator", file=DESCRIPTOR, index=0, - serialized_options=None, - serialized_start=8560, - serialized_end=9279, + serialized_options=_b( + "\312A\025vision.googleapis.com\322A[https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-vision" + ), + serialized_start=8890, + serialized_end=9899, methods=[ _descriptor.MethodDescriptor( name="BatchAnnotateImages", @@ -4605,7 +4711,7 @@ input_type=_BATCHANNOTATEIMAGESREQUEST, output_type=_BATCHANNOTATEIMAGESRESPONSE, serialized_options=_b( - '\202\323\344\223\002\037"\032/v1p4beta1/images:annotate:\001*' + '\202\323\344\223\002\037"\032/v1p4beta1/images:annotate:\001*\332A\010requests' ), ), _descriptor.MethodDescriptor( @@ -4616,7 +4722,7 @@ input_type=_BATCHANNOTATEFILESREQUEST, output_type=_BATCHANNOTATEFILESRESPONSE, serialized_options=_b( - '\202\323\344\223\002\036"\031/v1p4beta1/files:annotate:\001*' + '\202\323\344\223\002\036"\031/v1p4beta1/files:annotate:\001*\332A\010requests' ), ), _descriptor.MethodDescriptor( @@ -4627,7 +4733,7 @@ input_type=_ASYNCBATCHANNOTATEIMAGESREQUEST, output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, serialized_options=_b( - '\202\323\344\223\002)"$/v1p4beta1/images:asyncBatchAnnotate:\001*' + '\202\323\344\223\002)"$/v1p4beta1/images:asyncBatchAnnotate:\001*\332A\026requests,output_config\312A5\n 
AsyncBatchAnnotateImagesResponse\022\021OperationMetadata' ), ), _descriptor.MethodDescriptor( @@ -4638,7 +4744,7 @@ input_type=_ASYNCBATCHANNOTATEFILESREQUEST, output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, serialized_options=_b( - '\202\323\344\223\002("#/v1p4beta1/files:asyncBatchAnnotate:\001*' + '\202\323\344\223\002("#/v1p4beta1/files:asyncBatchAnnotate:\001*\332A\010requests\312A4\n\037AsyncBatchAnnotateFilesResponse\022\021OperationMetadata' ), ), ], diff --git a/vision/google/cloud/vision_v1p4beta1/proto/product_search.proto b/vision/google/cloud/vision_v1p4beta1/proto/product_search.proto index 1caf30194f5b..15baed1fd27c 100644 --- a/vision/google/cloud/vision_v1p4beta1/proto/product_search.proto +++ b/vision/google/cloud/vision_v1p4beta1/proto/product_search.proto @@ -1,4 +1,4 @@ -// Copyright 2018 Google LLC. +// Copyright 2019 Google LLC. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -18,6 +18,7 @@ syntax = "proto3"; package google.cloud.vision.v1p4beta1; import "google/api/annotations.proto"; +import "google/api/resource.proto"; import "google/cloud/vision/v1p4beta1/geometry.proto"; import "google/cloud/vision/v1p4beta1/product_search_service.proto"; import "google/protobuf/timestamp.proto"; @@ -32,7 +33,7 @@ option objc_class_prefix = "GCVN"; // Parameters for a product search request. message ProductSearchParams { // The bounding polygon around the area of interest in the image. - // Optional. If it is not specified, system discretion will be applied. + // If it is not specified, system discretion will be applied. BoundingPoly bounding_poly = 9; // The resource name of a @@ -41,23 +42,27 @@ message ProductSearchParams { // // Format is: // `projects/PROJECT_ID/locations/LOC_ID/productSets/PRODUCT_SET_ID`. 
- string product_set = 6; + string product_set = 6 [(google.api.resource_reference) = { + type: "vision.googleapis.com/ProductSet" + }]; // The list of product categories to search in. Currently, we only consider - // the first category, and either "homegoods-v2", "apparel-v2", "toys-v2", or - // "packagedgoods-v1" should be specified. The legacy categories "homegoods", - // "apparel", and "toys" are still supported but will be deprecated. For new - // products, please use "homegoods-v2", "apparel-v2", or "toys-v2" for better - // product search accuracy. It is recommended to migrate existing products to - // these categories as well. + // the first category, and either "homegoods-v2", "apparel-v2", "toys-v2", + // "packagedgoods-v1", or "general-v1" should be specified. The legacy + // categories "homegoods", "apparel", and "toys" are still supported but will + // be deprecated. For new products, please use "homegoods-v2", "apparel-v2", + // or "toys-v2" for better product search accuracy. It is recommended to + // migrate existing products to these categories as well. repeated string product_categories = 7; // The filtering expression. This can be used to restrict search results based // on Product labels. We currently support an AND of OR of key-value - // expressions, where each expression within an OR must have the same key. + // expressions, where each expression within an OR must have the same key. An + // '=' should be used to connect the key and value. // // For example, "(color = red OR color = blue) AND brand = Google" is - // acceptable, but not "(color = red OR brand = Google)" or "color: red". + // acceptable, but "(color = red OR brand = Google)" is not acceptable. + // "color: red" is not acceptable because it uses a ':' instead of an '='. string filter = 8; } @@ -77,6 +82,23 @@ message ProductSearchResults { string image = 3; } + // Prediction for what the object in the bounding box is. 
+ message ObjectAnnotation { + // Object ID that should align with EntityAnnotation mid. + string mid = 1; + + // The BCP-47 language code, such as "en-US" or "sr-Latn". For more + // information, see + // http://www.unicode.org/reports/tr35/#Unicode_locale_identifier. + string language_code = 2; + + // Object name, expressed in its `language_code` language. + string name = 3; + + // Score of the result. Range [0, 1]. + float score = 4; + } + // Information about the products similar to a single product in a query // image. message GroupedResult { @@ -85,10 +107,14 @@ message ProductSearchResults { // List of results, one for each product match. repeated Result results = 2; + + // List of generic predictions for the object in the bounding box. + repeated ObjectAnnotation object_annotations = 3; } - // Timestamp of the index which provided these results. Changes made after - // this time are not reflected in the current results. + // Timestamp of the index which provided these results. Products added to the + // product set and products removed from the product set after this time are + // not reflected in the current results. google.protobuf.Timestamp index_time = 2; // List of results, one for each product match. 
diff --git a/vision/google/cloud/vision_v1p4beta1/proto/product_search_pb2.py b/vision/google/cloud/vision_v1p4beta1/proto/product_search_pb2.py index 16c545c28b97..a631a4440500 100644 --- a/vision/google/cloud/vision_v1p4beta1/proto/product_search_pb2.py +++ b/vision/google/cloud/vision_v1p4beta1/proto/product_search_pb2.py @@ -16,6 +16,7 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.cloud.vision_v1p4beta1.proto import ( geometry_pb2 as google_dot_cloud_dot_vision__v1p4beta1_dot_proto_dot_geometry__pb2, ) @@ -33,10 +34,11 @@ "\n!com.google.cloud.vision.v1p4beta1B\022ProductSearchProtoP\001ZCgoogle.golang.org/genproto/googleapis/cloud/vision/v1p4beta1;vision\370\001\001\242\002\004GCVN" ), serialized_pb=_b( - '\n8google/cloud/vision_v1p4beta1/proto/product_search.proto\x12\x1dgoogle.cloud.vision.v1p4beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x32google/cloud/vision_v1p4beta1/proto/geometry.proto\x1a@google/cloud/vision_v1p4beta1/proto/product_search_service.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\x9a\x01\n\x13ProductSearchParams\x12\x42\n\rbounding_poly\x18\t \x01(\x0b\x32+.google.cloud.vision.v1p4beta1.BoundingPoly\x12\x13\n\x0bproduct_set\x18\x06 \x01(\t\x12\x1a\n\x12product_categories\x18\x07 \x03(\t\x12\x0e\n\x06\x66ilter\x18\x08 \x01(\t"\xfb\x03\n\x14ProductSearchResults\x12.\n\nindex_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12K\n\x07results\x18\x05 \x03(\x0b\x32:.google.cloud.vision.v1p4beta1.ProductSearchResults.Result\x12\x62\n\x17product_grouped_results\x18\x06 \x03(\x0b\x32\x41.google.cloud.vision.v1p4beta1.ProductSearchResults.GroupedResult\x1a_\n\x06Result\x12\x37\n\x07product\x18\x01 \x01(\x0b\x32&.google.cloud.vision.v1p4beta1.Product\x12\r\n\x05score\x18\x02 \x01(\x02\x12\r\n\x05image\x18\x03 \x01(\t\x1a\xa0\x01\n\rGroupedResult\x12\x42\n\rbounding_poly\x18\x01 
\x01(\x0b\x32+.google.cloud.vision.v1p4beta1.BoundingPoly\x12K\n\x07results\x18\x02 \x03(\x0b\x32:.google.cloud.vision.v1p4beta1.ProductSearchResults.ResultB\x88\x01\n!com.google.cloud.vision.v1p4beta1B\x12ProductSearchProtoP\x01ZCgoogle.golang.org/genproto/googleapis/cloud/vision/v1p4beta1;vision\xf8\x01\x01\xa2\x02\x04GCVNb\x06proto3' + '\n8google/cloud/vision_v1p4beta1/proto/product_search.proto\x12\x1dgoogle.cloud.vision.v1p4beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x19google/api/resource.proto\x1a\x32google/cloud/vision_v1p4beta1/proto/geometry.proto\x1a@google/cloud/vision_v1p4beta1/proto/product_search_service.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xc1\x01\n\x13ProductSearchParams\x12\x42\n\rbounding_poly\x18\t \x01(\x0b\x32+.google.cloud.vision.v1p4beta1.BoundingPoly\x12:\n\x0bproduct_set\x18\x06 \x01(\tB%\xfa\x41"\n vision.googleapis.com/ProductSet\x12\x1a\n\x12product_categories\x18\x07 \x03(\t\x12\x0e\n\x06\x66ilter\x18\x08 \x01(\t"\xb2\x05\n\x14ProductSearchResults\x12.\n\nindex_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12K\n\x07results\x18\x05 \x03(\x0b\x32:.google.cloud.vision.v1p4beta1.ProductSearchResults.Result\x12\x62\n\x17product_grouped_results\x18\x06 \x03(\x0b\x32\x41.google.cloud.vision.v1p4beta1.ProductSearchResults.GroupedResult\x1a_\n\x06Result\x12\x37\n\x07product\x18\x01 \x01(\x0b\x32&.google.cloud.vision.v1p4beta1.Product\x12\r\n\x05score\x18\x02 \x01(\x02\x12\r\n\x05image\x18\x03 \x01(\t\x1aS\n\x10ObjectAnnotation\x12\x0b\n\x03mid\x18\x01 \x01(\t\x12\x15\n\rlanguage_code\x18\x02 \x01(\t\x12\x0c\n\x04name\x18\x03 \x01(\t\x12\r\n\x05score\x18\x04 \x01(\x02\x1a\x82\x02\n\rGroupedResult\x12\x42\n\rbounding_poly\x18\x01 \x01(\x0b\x32+.google.cloud.vision.v1p4beta1.BoundingPoly\x12K\n\x07results\x18\x02 \x03(\x0b\x32:.google.cloud.vision.v1p4beta1.ProductSearchResults.Result\x12`\n\x12object_annotations\x18\x03 
\x03(\x0b\x32\x44.google.cloud.vision.v1p4beta1.ProductSearchResults.ObjectAnnotationB\x88\x01\n!com.google.cloud.vision.v1p4beta1B\x12ProductSearchProtoP\x01ZCgoogle.golang.org/genproto/googleapis/cloud/vision/v1p4beta1;vision\xf8\x01\x01\xa2\x02\x04GCVNb\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_resource__pb2.DESCRIPTOR, google_dot_cloud_dot_vision__v1p4beta1_dot_proto_dot_geometry__pb2.DESCRIPTOR, google_dot_cloud_dot_vision__v1p4beta1_dot_proto_dot_product__search__service__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, @@ -84,7 +86,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b('\372A"\n vision.googleapis.com/ProductSet'), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -132,8 +134,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=273, - serialized_end=427, + serialized_start=300, + serialized_end=493, ) @@ -207,8 +209,100 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=679, - serialized_end=774, + serialized_start=745, + serialized_end=840, +) + +_PRODUCTSEARCHRESULTS_OBJECTANNOTATION = _descriptor.Descriptor( + name="ObjectAnnotation", + full_name="google.cloud.vision.v1p4beta1.ProductSearchResults.ObjectAnnotation", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="mid", + full_name="google.cloud.vision.v1p4beta1.ProductSearchResults.ObjectAnnotation.mid", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="language_code", + full_name="google.cloud.vision.v1p4beta1.ProductSearchResults.ObjectAnnotation.language_code", + index=1, + 
number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="name", + full_name="google.cloud.vision.v1p4beta1.ProductSearchResults.ObjectAnnotation.name", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="score", + full_name="google.cloud.vision.v1p4beta1.ProductSearchResults.ObjectAnnotation.score", + index=3, + number=4, + type=2, + cpp_type=6, + label=1, + has_default_value=False, + default_value=float(0), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=842, + serialized_end=925, ) _PRODUCTSEARCHRESULTS_GROUPEDRESULT = _descriptor.Descriptor( @@ -254,6 +348,24 @@ serialized_options=None, file=DESCRIPTOR, ), + _descriptor.FieldDescriptor( + name="object_annotations", + full_name="google.cloud.vision.v1p4beta1.ProductSearchResults.GroupedResult.object_annotations", + index=2, + number=3, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), ], extensions=[], nested_types=[], @@ -263,8 +375,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=777, - 
serialized_end=937, + serialized_start=928, + serialized_end=1186, ) _PRODUCTSEARCHRESULTS = _descriptor.Descriptor( @@ -330,15 +442,19 @@ ), ], extensions=[], - nested_types=[_PRODUCTSEARCHRESULTS_RESULT, _PRODUCTSEARCHRESULTS_GROUPEDRESULT], + nested_types=[ + _PRODUCTSEARCHRESULTS_RESULT, + _PRODUCTSEARCHRESULTS_OBJECTANNOTATION, + _PRODUCTSEARCHRESULTS_GROUPEDRESULT, + ], enum_types=[], serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=430, - serialized_end=937, + serialized_start=496, + serialized_end=1186, ) _PRODUCTSEARCHPARAMS.fields_by_name[ @@ -352,6 +468,7 @@ google_dot_cloud_dot_vision__v1p4beta1_dot_proto_dot_product__search__service__pb2._PRODUCT ) _PRODUCTSEARCHRESULTS_RESULT.containing_type = _PRODUCTSEARCHRESULTS +_PRODUCTSEARCHRESULTS_OBJECTANNOTATION.containing_type = _PRODUCTSEARCHRESULTS _PRODUCTSEARCHRESULTS_GROUPEDRESULT.fields_by_name[ "bounding_poly" ].message_type = ( @@ -360,6 +477,9 @@ _PRODUCTSEARCHRESULTS_GROUPEDRESULT.fields_by_name[ "results" ].message_type = _PRODUCTSEARCHRESULTS_RESULT +_PRODUCTSEARCHRESULTS_GROUPEDRESULT.fields_by_name[ + "object_annotations" +].message_type = _PRODUCTSEARCHRESULTS_OBJECTANNOTATION _PRODUCTSEARCHRESULTS_GROUPEDRESULT.containing_type = _PRODUCTSEARCHRESULTS _PRODUCTSEARCHRESULTS.fields_by_name[ "index_time" @@ -386,8 +506,7 @@ Attributes: bounding_poly: The bounding polygon around the area of interest in the image. - Optional. If it is not specified, system discretion will be - applied. + If it is not specified, system discretion will be applied. product_set: The resource name of a [ProductSet][google.cloud.vision.v1p4beta1.ProductSet] to be @@ -396,20 +515,22 @@ product_categories: The list of product categories to search in. Currently, we only consider the first category, and either "homegoods-v2", - "apparel-v2", "toys-v2", or "packagedgoods-v1" should be - specified. 
The legacy categories "homegoods", "apparel", and - "toys" are still supported but will be deprecated. For new - products, please use "homegoods-v2", "apparel-v2", or - "toys-v2" for better product search accuracy. It is - recommended to migrate existing products to these categories - as well. + "apparel-v2", "toys-v2", "packagedgoods-v1", or "general-v1" + should be specified. The legacy categories "homegoods", + "apparel", and "toys" are still supported but will be + deprecated. For new products, please use "homegoods-v2", + "apparel-v2", or "toys-v2" for better product search accuracy. + It is recommended to migrate existing products to these + categories as well. filter: The filtering expression. This can be used to restrict search results based on Product labels. We currently support an AND of OR of key-value expressions, where each expression within - an OR must have the same key. For example, "(color = red OR - color = blue) AND brand = Google" is acceptable, but not - "(color = red OR brand = Google)" or "color: red". + an OR must have the same key. An '=' should be used to connect + the key and value. For example, "(color = red OR color = + blue) AND brand = Google" is acceptable, but "(color = red OR + brand = Google)" is not acceptable. "color: red" is not + acceptable because it uses a ':' instead of an '='. """, # @@protoc_insertion_point(class_scope:google.cloud.vision.v1p4beta1.ProductSearchParams) ), @@ -442,6 +563,30 @@ # @@protoc_insertion_point(class_scope:google.cloud.vision.v1p4beta1.ProductSearchResults.Result) ), ), + ObjectAnnotation=_reflection.GeneratedProtocolMessageType( + "ObjectAnnotation", + (_message.Message,), + dict( + DESCRIPTOR=_PRODUCTSEARCHRESULTS_OBJECTANNOTATION, + __module__="google.cloud.vision_v1p4beta1.proto.product_search_pb2", + __doc__="""Prediction for what the object in the bounding box is. + + + Attributes: + mid: + Object ID that should align with EntityAnnotation mid. 
+ language_code: + The BCP-47 language code, such as "en-US" or "sr-Latn". For + more information, see http://www.unicode.org/reports/tr35/#Uni + code\_locale\_identifier. + name: + Object name, expressed in its ``language_code`` language. + score: + Score of the result. Range [0, 1]. + """, + # @@protoc_insertion_point(class_scope:google.cloud.vision.v1p4beta1.ProductSearchResults.ObjectAnnotation) + ), + ), GroupedResult=_reflection.GeneratedProtocolMessageType( "GroupedResult", (_message.Message,), @@ -458,6 +603,9 @@ image. results: List of results, one for each product match. + object_annotations: + List of generic predictions for the object in the bounding + box. """, # @@protoc_insertion_point(class_scope:google.cloud.vision.v1p4beta1.ProductSearchResults.GroupedResult) ), @@ -469,8 +617,9 @@ Attributes: index_time: - Timestamp of the index which provided these results. Changes - made after this time are not reflected in the current results. + Timestamp of the index which provided these results. Products + added to the product set and products removed from the product + set after this time are not reflected in the current results. results: List of results, one for each product match. 
product_grouped_results: @@ -485,8 +634,10 @@ ) _sym_db.RegisterMessage(ProductSearchResults) _sym_db.RegisterMessage(ProductSearchResults.Result) +_sym_db.RegisterMessage(ProductSearchResults.ObjectAnnotation) _sym_db.RegisterMessage(ProductSearchResults.GroupedResult) DESCRIPTOR._options = None +_PRODUCTSEARCHPARAMS.fields_by_name["product_set"]._options = None # @@protoc_insertion_point(module_scope) diff --git a/vision/google/cloud/vision_v1p4beta1/proto/product_search_service.proto b/vision/google/cloud/vision_v1p4beta1/proto/product_search_service.proto index 3cbc53573f15..1a165c708831 100644 --- a/vision/google/cloud/vision_v1p4beta1/proto/product_search_service.proto +++ b/vision/google/cloud/vision_v1p4beta1/proto/product_search_service.proto @@ -1,4 +1,4 @@ -// Copyright 2018 Google LLC. +// Copyright 2019 Google LLC. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -18,6 +18,9 @@ syntax = "proto3"; package google.cloud.vision.v1p4beta1; import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; import "google/cloud/vision/v1p4beta1/geometry.proto"; import "google/longrunning/operations.proto"; import "google/protobuf/empty.proto"; @@ -51,6 +54,11 @@ option objc_class_prefix = "GCVN"; // named // `projects/*/locations/*/products/*/referenceImages/*` service ProductSearch { + option (google.api.default_host) = "vision.googleapis.com"; + option (google.api.oauth_scopes) = + "https://www.googleapis.com/auth/cloud-platform," + "https://www.googleapis.com/auth/cloud-vision"; + // Creates and returns a new ProductSet resource. 
// // Possible errors: @@ -62,6 +70,7 @@ service ProductSearch { post: "/v1p4beta1/{parent=projects/*/locations/*}/productSets" body: "product_set" }; + option (google.api.method_signature) = "parent,product_set,product_set_id"; } // Lists ProductSets in an unspecified order. @@ -75,6 +84,7 @@ service ProductSearch { option (google.api.http) = { get: "/v1p4beta1/{parent=projects/*/locations/*}/productSets" }; + option (google.api.method_signature) = "parent"; } // Gets information associated with a ProductSet. @@ -86,6 +96,7 @@ service ProductSearch { option (google.api.http) = { get: "/v1p4beta1/{name=projects/*/locations/*/productSets/*}" }; + option (google.api.method_signature) = "name"; } // Makes changes to a ProductSet resource. @@ -101,21 +112,19 @@ service ProductSearch { patch: "/v1p4beta1/{product_set.name=projects/*/locations/*/productSets/*}" body: "product_set" }; + option (google.api.method_signature) = "product_set,update_mask"; } // Permanently deletes a ProductSet. Products and ReferenceImages in the // ProductSet are not deleted. // // The actual image files are not deleted from Google Cloud Storage. - // - // Possible errors: - // - // * Returns NOT_FOUND if the ProductSet does not exist. rpc DeleteProductSet(DeleteProductSetRequest) returns (google.protobuf.Empty) { option (google.api.http) = { delete: "/v1p4beta1/{name=projects/*/locations/*/productSets/*}" }; + option (google.api.method_signature) = "name"; } // Creates and returns a new product resource. @@ -131,6 +140,7 @@ service ProductSearch { post: "/v1p4beta1/{parent=projects/*/locations/*}/products" body: "product" }; + option (google.api.method_signature) = "parent,product,product_id"; } // Lists products in an unspecified order. @@ -142,6 +152,7 @@ service ProductSearch { option (google.api.http) = { get: "/v1p4beta1/{parent=projects/*/locations/*}/products" }; + option (google.api.method_signature) = "parent"; } // Gets information associated with a Product. 
@@ -153,6 +164,7 @@ service ProductSearch { option (google.api.http) = { get: "/v1p4beta1/{name=projects/*/locations/*/products/*}" }; + option (google.api.method_signature) = "name"; } // Makes changes to a Product resource. @@ -175,6 +187,7 @@ service ProductSearch { patch: "/v1p4beta1/{product.name=projects/*/locations/*/products/*}" body: "product" }; + option (google.api.method_signature) = "product,update_mask"; } // Permanently deletes a product and its reference images. @@ -182,14 +195,11 @@ service ProductSearch { // Metadata of the product and all its images will be deleted right away, but // search queries against ProductSets containing the product may still work // until all related caches are refreshed. - // - // Possible errors: - // - // * Returns NOT_FOUND if the product does not exist. rpc DeleteProduct(DeleteProductRequest) returns (google.protobuf.Empty) { option (google.api.http) = { delete: "/v1p4beta1/{name=projects/*/locations/*/products/*}" }; + option (google.api.method_signature) = "name"; } // Creates and returns a new ReferenceImage resource. @@ -217,6 +227,8 @@ service ProductSearch { post: "/v1p4beta1/{parent=projects/*/locations/*/products/*}/referenceImages" body: "reference_image" }; + option (google.api.method_signature) = + "parent,reference_image,reference_image_id"; } // Permanently deletes a reference image. @@ -226,15 +238,12 @@ service ProductSearch { // caches are refreshed. // // The actual image files are not deleted from Google Cloud Storage. - // - // Possible errors: - // - // * Returns NOT_FOUND if the reference image does not exist. rpc DeleteReferenceImage(DeleteReferenceImageRequest) returns (google.protobuf.Empty) { option (google.api.http) = { delete: "/v1p4beta1/{name=projects/*/locations/*/products/*/referenceImages/*}" }; + option (google.api.method_signature) = "name"; } // Lists reference images. 
@@ -249,6 +258,7 @@ service ProductSearch { option (google.api.http) = { get: "/v1p4beta1/{parent=projects/*/locations/*/products/*}/referenceImages" }; + option (google.api.method_signature) = "parent"; } // Gets information associated with a ReferenceImage. @@ -260,6 +270,7 @@ service ProductSearch { option (google.api.http) = { get: "/v1p4beta1/{name=projects/*/locations/*/products/*/referenceImages/*}" }; + option (google.api.method_signature) = "name"; } // Adds a Product to the specified ProductSet. If the Product is already @@ -276,19 +287,17 @@ service ProductSearch { post: "/v1p4beta1/{name=projects/*/locations/*/productSets/*}:addProduct" body: "*" }; + option (google.api.method_signature) = "name,product"; } // Removes a Product from the specified ProductSet. - // - // Possible errors: - // - // * Returns NOT_FOUND If the Product is not found under the ProductSet. rpc RemoveProductFromProductSet(RemoveProductFromProductSetRequest) returns (google.protobuf.Empty) { option (google.api.http) = { post: "/v1p4beta1/{name=projects/*/locations/*/productSets/*}:removeProduct" body: "*" }; + option (google.api.method_signature) = "name,product"; } // Lists the Products in a ProductSet, in an unspecified order. If the @@ -303,6 +312,7 @@ service ProductSearch { option (google.api.http) = { get: "/v1p4beta1/{name=projects/*/locations/*/productSets/*}/products" }; + option (google.api.method_signature) = "name"; } // Asynchronous API that imports a list of reference images to specified @@ -322,11 +332,58 @@ service ProductSearch { post: "/v1p4beta1/{parent=projects/*/locations/*}/productSets:import" body: "*" }; + option (google.api.method_signature) = "parent,input_config"; + option (google.longrunning.operation_info) = { + response_type: "ImportProductSetsResponse" + metadata_type: "BatchOperationMetadata" + }; + } + + // Asynchronous API to delete all Products in a ProductSet or all Products + // that are in no ProductSet. 
+ // + // If a Product is a member of the specified ProductSet in addition to other + // ProductSets, the Product will still be deleted. + // + // It is recommended to not delete the specified ProductSet until after this + // operation has completed. It is also recommended to not add any of the + // Products involved in the batch delete to a new ProductSet while this + // operation is running because those Products may still end up deleted. + // + // It's not possible to undo the PurgeProducts operation. Therefore, it is + // recommended to keep the csv files used in ImportProductSets (if that was + // how you originally built the Product Set) before starting PurgeProducts, in + // case you need to re-import the data after deletion. + // + // If the plan is to purge all of the Products from a ProductSet and then + // re-use the empty ProductSet to re-import new Products into the empty + // ProductSet, you must wait until the PurgeProducts operation has finished + // for that ProductSet. + // + // The [google.longrunning.Operation][google.longrunning.Operation] API can be + // used to keep track of the progress and results of the request. + // `Operation.metadata` contains `BatchOperationMetadata`. (progress) + rpc PurgeProducts(PurgeProductsRequest) + returns (google.longrunning.Operation) { + option (google.api.http) = { + post: "/v1p4beta1/{parent=projects/*/locations/*}/products:purge" + body: "*" + }; + option (google.api.method_signature) = "parent"; + option (google.longrunning.operation_info) = { + response_type: "google.protobuf.Empty" + metadata_type: "BatchOperationMetadata" + }; } } // A Product contains ReferenceImages. message Product { + option (google.api.resource) = { + type: "vision.googleapis.com/Product" + pattern: "projects/{project}/locations/{location}/products/{product}" + }; + // A product label represented as a key-value pair. message KeyValue { // The key of the label attached to the product. 
Cannot be empty and cannot @@ -354,16 +411,11 @@ message Product { // characters long. string description = 3; - // The category for the product identified by the reference image. This should - // be either "homegoods-v2", "apparel-v2", "toys-v2", or "packagedgoods-v1". - // The legacy categories "homegoods", "apparel", and "toys" are still - // supported but will be deprecated. For new products, please use - // "homegoods-v2", "apparel-v2", or "toys-v2" for better product search - // accuracy. It is recommended to migrate existing products to these - // categories as well. - // - // This field is immutable. - string product_category = 4; + // Immutable. The category for the product identified by the reference image. + // This should be either "homegoods-v2", "apparel-v2", or "toys-v2". The + // legacy categories "homegoods", "apparel", and "toys" are still supported, + // but these should not be used for new products. + string product_category = 4 [(google.api.field_behavior) = IMMUTABLE]; // Key-value pairs that can be attached to a product. At query time, // constraints can be specified based on the product_labels. @@ -373,7 +425,11 @@ message Product { // to be supported soon. // // Multiple values can be assigned to the same key. One product may have up to - // 100 product_labels. + // 500 product_labels. + // + // Notice that the total number of distinct product_labels over all products + // in one ProductSet cannot exceed 1M, otherwise the product search pipeline + // will refuse to work for that ProductSet. repeated KeyValue product_labels = 5; } @@ -381,6 +437,11 @@ message Product { // million reference images. If the limit is exceeded, periodic indexing will // fail. message ProductSet { + option (google.api.resource) = { + type: "vision.googleapis.com/ProductSet" + pattern: "projects/{project}/locations/{location}/productSets/{product_set}" + }; + // The resource name of the ProductSet. 
// // Format is: @@ -399,18 +460,24 @@ message ProductSet { // "1970-01-01T00:00:00Z". // // This field is ignored when creating a ProductSet. - google.protobuf.Timestamp index_time = 3; + google.protobuf.Timestamp index_time = 3 + [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. If there was an error with indexing the product set, the field // is populated. // // This field is ignored when creating a ProductSet. - google.rpc.Status index_error = 4; + google.rpc.Status index_error = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; } // A `ReferenceImage` represents a product image and its associated metadata, // such as bounding boxes. message ReferenceImage { + option (google.api.resource) = { + type: "vision.googleapis.com/ReferenceImage" + pattern: "projects/{project}/locations/{location}/products/{product}/referenceImages/{reference_image}" + }; + // The resource name of the reference image. // // Format is: @@ -420,34 +487,38 @@ message ReferenceImage { // This field is ignored when creating a reference image. string name = 1; - // The Google Cloud Storage URI of the reference image. + // Required. The Google Cloud Storage URI of the reference image. // // The URI must start with `gs://`. - // - // Required. - string uri = 2; + string uri = 2 [(google.api.field_behavior) = REQUIRED]; - // Bounding polygons around the areas of interest in the reference image. - // Optional. If this field is empty, the system will try to detect regions of + // Optional. Bounding polygons around the areas of interest in the reference + // image. If this field is empty, the system will try to detect regions of // interest. At most 10 bounding polygons will be used. // // The provided shape is converted into a non-rotated rectangle. Once // converted, the small edge of the rectangle must be greater than or equal // to 300 pixels. The aspect ratio must be 1:4 or less (i.e. 1:3 is ok; 1:5 // is not). 
- repeated BoundingPoly bounding_polys = 3; + repeated BoundingPoly bounding_polys = 3 + [(google.api.field_behavior) = OPTIONAL]; } // Request message for the `CreateProduct` method. message CreateProductRequest { - // The project in which the Product should be created. + // Required. The project in which the Product should be created. // // Format is // `projects/PROJECT_ID/locations/LOC_ID`. - string parent = 1; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; - // The product to create. - Product product = 2; + // Required. The product to create. + Product product = 2 [(google.api.field_behavior) = REQUIRED]; // A user-supplied resource id for this Product. If set, the server will // attempt to use this value as the resource id. If it is already in use, an @@ -458,11 +529,16 @@ message CreateProductRequest { // Request message for the `ListProducts` method. message ListProductsRequest { - // The project OR ProductSet from which Products should be listed. + // Required. The project OR ProductSet from which Products should be listed. // // Format: // `projects/PROJECT_ID/locations/LOC_ID` - string parent = 1; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; // The maximum number of items to return. Default 10, maximum 100. int32 page_size = 2; @@ -483,18 +559,21 @@ message ListProductsResponse { // Request message for the `GetProduct` method. message GetProductRequest { - // Resource name of the Product to get. + // Required. Resource name of the Product to get. // // Format is: // `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID` - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { type: "vision.googleapis.com/Product" } + ]; } // Request message for the `UpdateProduct` method. 
message UpdateProductRequest { - // The Product resource which replaces the one on the server. + // Required. The Product resource which replaces the one on the server. // product.name is immutable. - Product product = 1; + Product product = 1 [(google.api.field_behavior) = REQUIRED]; // The [FieldMask][google.protobuf.FieldMask] that specifies which fields // to update. @@ -506,22 +585,30 @@ message UpdateProductRequest { // Request message for the `DeleteProduct` method. message DeleteProductRequest { - // Resource name of product to delete. + // Required. Resource name of product to delete. // // Format is: // `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID` - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { type: "vision.googleapis.com/Product" } + ]; } // Request message for the `CreateProductSet` method. message CreateProductSetRequest { - // The project in which the ProductSet should be created. + // Required. The project in which the ProductSet should be created. // // Format is `projects/PROJECT_ID/locations/LOC_ID`. - string parent = 1; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; - // The ProductSet to create. - ProductSet product_set = 2; + // Required. The ProductSet to create. + ProductSet product_set = 2 [(google.api.field_behavior) = REQUIRED]; // A user-supplied resource id for this ProductSet. If set, the server will // attempt to use this value as the resource id. If it is already in use, an @@ -532,10 +619,15 @@ message CreateProductSetRequest { // Request message for the `ListProductSets` method. message ListProductSetsRequest { - // The project from which ProductSets should be listed. + // Required. The project from which ProductSets should be listed. // // Format is `projects/PROJECT_ID/locations/LOC_ID`. 
- string parent = 1; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; // The maximum number of items to return. Default 10, maximum 100. int32 page_size = 2; @@ -556,17 +648,22 @@ message ListProductSetsResponse { // Request message for the `GetProductSet` method. message GetProductSetRequest { - // Resource name of the ProductSet to get. + // Required. Resource name of the ProductSet to get. // // Format is: // `projects/PROJECT_ID/locations/LOG_ID/productSets/PRODUCT_SET_ID` - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "vision.googleapis.com/ProductSet" + } + ]; } // Request message for the `UpdateProductSet` method. message UpdateProductSetRequest { - // The ProductSet resource which replaces the one on the server. - ProductSet product_set = 1; + // Required. The ProductSet resource which replaces the one on the server. + ProductSet product_set = 1 [(google.api.field_behavior) = REQUIRED]; // The [FieldMask][google.protobuf.FieldMask] that specifies which fields to // update. @@ -577,24 +674,33 @@ message UpdateProductSetRequest { // Request message for the `DeleteProductSet` method. message DeleteProductSetRequest { - // Resource name of the ProductSet to delete. + // Required. Resource name of the ProductSet to delete. // // Format is: // `projects/PROJECT_ID/locations/LOC_ID/productSets/PRODUCT_SET_ID` - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "vision.googleapis.com/ProductSet" + } + ]; } // Request message for the `CreateReferenceImage` method. message CreateReferenceImageRequest { - // Resource name of the product in which to create the reference image. + // Required. Resource name of the product in which to create the reference + // image. 
// // Format is // `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID`. - string parent = 1; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { type: "vision.googleapis.com/Product" } + ]; - // The reference image to create. + // Required. The reference image to create. // If an image ID is specified, it is ignored. - ReferenceImage reference_image = 2; + ReferenceImage reference_image = 2 [(google.api.field_behavior) = REQUIRED]; // A user-supplied resource id for the ReferenceImage to be added. If set, // the server will attempt to use this value as the resource id. If it is @@ -605,11 +711,14 @@ message CreateReferenceImageRequest { // Request message for the `ListReferenceImages` method. message ListReferenceImagesRequest { - // Resource name of the product containing the reference images. + // Required. Resource name of the product containing the reference images. // // Format is // `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID`. - string parent = 1; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { type: "vision.googleapis.com/Product" } + ]; // The maximum number of items to return. Default 10, maximum 100. int32 page_size = 2; @@ -635,61 +744,93 @@ message ListReferenceImagesResponse { // Request message for the `GetReferenceImage` method. message GetReferenceImageRequest { - // The resource name of the ReferenceImage to get. + // Required. The resource name of the ReferenceImage to get. // // Format is: // // `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID/referenceImages/IMAGE_ID`. - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "vision.googleapis.com/ReferenceImage" + } + ]; } // Request message for the `DeleteReferenceImage` method. message DeleteReferenceImageRequest { - // The resource name of the reference image to delete. + // Required. 
The resource name of the reference image to delete. // // Format is: // // `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID/referenceImages/IMAGE_ID` - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "vision.googleapis.com/ReferenceImage" + } + ]; } // Request message for the `AddProductToProductSet` method. message AddProductToProductSetRequest { - // The resource name for the ProductSet to modify. + // Required. The resource name for the ProductSet to modify. // // Format is: // `projects/PROJECT_ID/locations/LOC_ID/productSets/PRODUCT_SET_ID` - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "vision.googleapis.com/ProductSet" + } + ]; - // The resource name for the Product to be added to this ProductSet. + // Required. The resource name for the Product to be added to this ProductSet. // // Format is: // `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID` - string product = 2; + string product = 2 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { type: "vision.googleapis.com/Product" } + ]; } // Request message for the `RemoveProductFromProductSet` method. message RemoveProductFromProductSetRequest { - // The resource name for the ProductSet to modify. + // Required. The resource name for the ProductSet to modify. // // Format is: // `projects/PROJECT_ID/locations/LOC_ID/productSets/PRODUCT_SET_ID` - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "vision.googleapis.com/ProductSet" + } + ]; - // The resource name for the Product to be removed from this ProductSet. + // Required. The resource name for the Product to be removed from this + // ProductSet. 
// // Format is: // `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID` - string product = 2; + string product = 2 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { type: "vision.googleapis.com/Product" } + ]; } // Request message for the `ListProductsInProductSet` method. message ListProductsInProductSetRequest { - // The ProductSet resource for which to retrieve Products. + // Required. The ProductSet resource for which to retrieve Products. // // Format is: // `projects/PROJECT_ID/locations/LOC_ID/productSets/PRODUCT_SET_ID` - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "vision.googleapis.com/ProductSet" + } + ]; // The maximum number of items to return. Default 10, maximum 100. int32 page_size = 2; @@ -789,13 +930,19 @@ message ImportProductSetsInputConfig { // Request message for the `ImportProductSets` method. message ImportProductSetsRequest { - // The project in which the ProductSets should be imported. + // Required. The project in which the ProductSets should be imported. // // Format is `projects/PROJECT_ID/locations/LOC_ID`. - string parent = 1; - - // The input content for the list of requests. - ImportProductSetsInputConfig input_config = 2; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; + + // Required. The input content for the list of requests. + ImportProductSetsInputConfig input_config = 2 + [(google.api.field_behavior) = REQUIRED]; } // Response message for the `ImportProductSets` method. @@ -855,3 +1002,38 @@ message BatchOperationMetadata { // set to true. google.protobuf.Timestamp end_time = 3; } + +// Config to control which ProductSet contains the Products to be deleted. +message ProductSetPurgeConfig { + // The ProductSet that contains the Products to delete. 
If a Product is a + // member of product_set_id in addition to other ProductSets, the Product will + // still be deleted. + string product_set_id = 1; +} + +// Request message for the `PurgeProducts` method. +message PurgeProductsRequest { + // The Products to delete. + oneof target { + // Specify which ProductSet contains the Products to be deleted. + ProductSetPurgeConfig product_set_purge_config = 2; + + // If delete_orphan_products is true, all Products that are not in any + // ProductSet will be deleted. + bool delete_orphan_products = 3; + } + + // Required. The project and location in which the Products should be deleted. + // + // Format is `projects/PROJECT_ID/locations/LOC_ID`. + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; + + // The default value is false. Override this value to true to actually perform + // the purge. + bool force = 4; +} diff --git a/vision/google/cloud/vision_v1p4beta1/proto/product_search_service_pb2.py b/vision/google/cloud/vision_v1p4beta1/proto/product_search_service_pb2.py index d3be79d27c5c..34f1d9d09ecd 100644 --- a/vision/google/cloud/vision_v1p4beta1/proto/product_search_service_pb2.py +++ b/vision/google/cloud/vision_v1p4beta1/proto/product_search_service_pb2.py @@ -16,6 +16,9 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.cloud.vision_v1p4beta1.proto import ( geometry_pb2 as google_dot_cloud_dot_vision__v1p4beta1_dot_proto_dot_geometry__pb2, ) @@ -36,10 +39,13 @@ "\n!com.google.cloud.vision.v1p4beta1B\031ProductSearchServiceProtoP\001ZCgoogle.golang.org/genproto/googleapis/cloud/vision/v1p4beta1;vision\370\001\001\242\002\004GCVN" ), 
serialized_pb=_b( - '\n@google/cloud/vision_v1p4beta1/proto/product_search_service.proto\x12\x1dgoogle.cloud.vision.v1p4beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x32google/cloud/vision_v1p4beta1/proto/geometry.proto\x1a#google/longrunning/operations.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto"\xcd\x01\n\x07Product\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12\x18\n\x10product_category\x18\x04 \x01(\t\x12G\n\x0eproduct_labels\x18\x05 \x03(\x0b\x32/.google.cloud.vision.v1p4beta1.Product.KeyValue\x1a&\n\x08KeyValue\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t"\x89\x01\n\nProductSet\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12.\n\nindex_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\'\n\x0bindex_error\x18\x04 \x01(\x0b\x32\x12.google.rpc.Status"p\n\x0eReferenceImage\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0b\n\x03uri\x18\x02 \x01(\t\x12\x43\n\x0e\x62ounding_polys\x18\x03 \x03(\x0b\x32+.google.cloud.vision.v1p4beta1.BoundingPoly"s\n\x14\x43reateProductRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x37\n\x07product\x18\x02 \x01(\x0b\x32&.google.cloud.vision.v1p4beta1.Product\x12\x12\n\nproduct_id\x18\x03 \x01(\t"L\n\x13ListProductsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"i\n\x14ListProductsResponse\x12\x38\n\x08products\x18\x01 \x03(\x0b\x32&.google.cloud.vision.v1p4beta1.Product\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"!\n\x11GetProductRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"\x80\x01\n\x14UpdateProductRequest\x12\x37\n\x07product\x18\x01 \x01(\x0b\x32&.google.cloud.vision.v1p4beta1.Product\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"$\n\x14\x44\x65leteProductRequest\x12\x0c\n\x04name\x18\x01 
\x01(\t"\x81\x01\n\x17\x43reateProductSetRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12>\n\x0bproduct_set\x18\x02 \x01(\x0b\x32).google.cloud.vision.v1p4beta1.ProductSet\x12\x16\n\x0eproduct_set_id\x18\x03 \x01(\t"O\n\x16ListProductSetsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"s\n\x17ListProductSetsResponse\x12?\n\x0cproduct_sets\x18\x01 \x03(\x0b\x32).google.cloud.vision.v1p4beta1.ProductSet\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"$\n\x14GetProductSetRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"\x8a\x01\n\x17UpdateProductSetRequest\x12>\n\x0bproduct_set\x18\x01 \x01(\x0b\x32).google.cloud.vision.v1p4beta1.ProductSet\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"\'\n\x17\x44\x65leteProductSetRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"\x91\x01\n\x1b\x43reateReferenceImageRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x46\n\x0freference_image\x18\x02 \x01(\x0b\x32-.google.cloud.vision.v1p4beta1.ReferenceImage\x12\x1a\n\x12reference_image_id\x18\x03 \x01(\t"S\n\x1aListReferenceImagesRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"\x92\x01\n\x1bListReferenceImagesResponse\x12G\n\x10reference_images\x18\x01 \x03(\x0b\x32-.google.cloud.vision.v1p4beta1.ReferenceImage\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x17\n\x0fnext_page_token\x18\x03 \x01(\t"(\n\x18GetReferenceImageRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"+\n\x1b\x44\x65leteReferenceImageRequest\x12\x0c\n\x04name\x18\x01 \x01(\t">\n\x1d\x41\x64\x64ProductToProductSetRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07product\x18\x02 \x01(\t"C\n"RemoveProductFromProductSetRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07product\x18\x02 \x01(\t"V\n\x1fListProductsInProductSetRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"u\n 
ListProductsInProductSetResponse\x12\x38\n\x08products\x18\x01 \x03(\x0b\x32&.google.cloud.vision.v1p4beta1.Product\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"2\n\x1aImportProductSetsGcsSource\x12\x14\n\x0c\x63sv_file_uri\x18\x01 \x01(\t"y\n\x1cImportProductSetsInputConfig\x12O\n\ngcs_source\x18\x01 \x01(\x0b\x32\x39.google.cloud.vision.v1p4beta1.ImportProductSetsGcsSourceH\x00\x42\x08\n\x06source"}\n\x18ImportProductSetsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12Q\n\x0cinput_config\x18\x02 \x01(\x0b\x32;.google.cloud.vision.v1p4beta1.ImportProductSetsInputConfig"\x8a\x01\n\x19ImportProductSetsResponse\x12G\n\x10reference_images\x18\x01 \x03(\x0b\x32-.google.cloud.vision.v1p4beta1.ReferenceImage\x12$\n\x08statuses\x18\x02 \x03(\x0b\x32\x12.google.rpc.Status"\x9e\x02\n\x16\x42\x61tchOperationMetadata\x12J\n\x05state\x18\x01 \x01(\x0e\x32;.google.cloud.vision.v1p4beta1.BatchOperationMetadata.State\x12/\n\x0bsubmit_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x03 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp"Y\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0e\n\nPROCESSING\x10\x01\x12\x0e\n\nSUCCESSFUL\x10\x02\x12\n\n\x06\x46\x41ILED\x10\x03\x12\r\n\tCANCELLED\x10\x04\x32\x9f\x1b\n\rProductSearch\x12\xc2\x01\n\x10\x43reateProductSet\x12\x36.google.cloud.vision.v1p4beta1.CreateProductSetRequest\x1a).google.cloud.vision.v1p4beta1.ProductSet"K\x82\xd3\xe4\x93\x02\x45"6/v1p4beta1/{parent=projects/*/locations/*}/productSets:\x0bproduct_set\x12\xc0\x01\n\x0fListProductSets\x12\x35.google.cloud.vision.v1p4beta1.ListProductSetsRequest\x1a\x36.google.cloud.vision.v1p4beta1.ListProductSetsResponse">\x82\xd3\xe4\x93\x02\x38\x12\x36/v1p4beta1/{parent=projects/*/locations/*}/productSets\x12\xaf\x01\n\rGetProductSet\x12\x33.google.cloud.vision.v1p4beta1.GetProductSetRequest\x1a).google.cloud.vision.v1p4beta1.ProductSet">\x82\xd3\xe4\x93\x02\x38\x12\x36/v1p4beta1/{name=projects/*/locations/*/productSets/*}\x12\xce\x01\n\x10UpdateProductSet\x12\x36.google.cloud.vision.v1p4beta1.UpdateProductSetRequest\x1a).google.cloud.vision.v1p4beta1.ProductSet"W\x82\xd3\xe4\x93\x02Q2B/v1p4beta1/{product_set.name=projects/*/locations/*/productSets/*}:\x0bproduct_set\x12\xa2\x01\n\x10\x44\x65leteProductSet\x12\x36.google.cloud.vision.v1p4beta1.DeleteProductSetRequest\x1a\x16.google.protobuf.Empty">\x82\xd3\xe4\x93\x02\x38*6/v1p4beta1/{name=projects/*/locations/*/productSets/*}\x12\xb2\x01\n\rCreateProduct\x12\x33.google.cloud.vision.v1p4beta1.CreateProductRequest\x1a&.google.cloud.vision.v1p4beta1.Product"D\x82\xd3\xe4\x93\x02>"3/v1p4beta1/{parent=projects/*/locations/*}/products:\x07product\x12\xb4\x01\n\x0cListProducts\x12\x32.google.cloud.vision.v1p4beta1.ListProductsRequest\x1a\x33.google.cloud.vision.v1p4beta1.ListProductsResponse";\x82\xd3\xe4\x93\x02\x35\x12\x33/v1p4beta1/{parent=projects/*/locations/*}/products\x12\xa3\x01\n\nGetProduct\x12\x30.google.cloud.vision.v1p4beta1.GetProductRequest\x1a&.google.cloud.vision.v1p4beta1.Product";\x82\xd
3\xe4\x93\x02\x35\x12\x33/v1p4beta1/{name=projects/*/locations/*/products/*}\x12\xba\x01\n\rUpdateProduct\x12\x33.google.cloud.vision.v1p4beta1.UpdateProductRequest\x1a&.google.cloud.vision.v1p4beta1.Product"L\x82\xd3\xe4\x93\x02\x46\x32;/v1p4beta1/{product.name=projects/*/locations/*/products/*}:\x07product\x12\x99\x01\n\rDeleteProduct\x12\x33.google.cloud.vision.v1p4beta1.DeleteProductRequest\x1a\x16.google.protobuf.Empty";\x82\xd3\xe4\x93\x02\x35*3/v1p4beta1/{name=projects/*/locations/*/products/*}\x12\xe1\x01\n\x14\x43reateReferenceImage\x12:.google.cloud.vision.v1p4beta1.CreateReferenceImageRequest\x1a-.google.cloud.vision.v1p4beta1.ReferenceImage"^\x82\xd3\xe4\x93\x02X"E/v1p4beta1/{parent=projects/*/locations/*/products/*}/referenceImages:\x0freference_image\x12\xb9\x01\n\x14\x44\x65leteReferenceImage\x12:.google.cloud.vision.v1p4beta1.DeleteReferenceImageRequest\x1a\x16.google.protobuf.Empty"M\x82\xd3\xe4\x93\x02G*E/v1p4beta1/{name=projects/*/locations/*/products/*/referenceImages/*}\x12\xdb\x01\n\x13ListReferenceImages\x12\x39.google.cloud.vision.v1p4beta1.ListReferenceImagesRequest\x1a:.google.cloud.vision.v1p4beta1.ListReferenceImagesResponse"M\x82\xd3\xe4\x93\x02G\x12\x45/v1p4beta1/{parent=projects/*/locations/*/products/*}/referenceImages\x12\xca\x01\n\x11GetReferenceImage\x12\x37.google.cloud.vision.v1p4beta1.GetReferenceImageRequest\x1a-.google.cloud.vision.v1p4beta1.ReferenceImage"M\x82\xd3\xe4\x93\x02G\x12\x45/v1p4beta1/{name=projects/*/locations/*/products/*/referenceImages/*}\x12\xbc\x01\n\x16\x41\x64\x64ProductToProductSet\x12<.google.cloud.vision.v1p4beta1.AddProductToProductSetRequest\x1a\x16.google.protobuf.Empty"L\x82\xd3\xe4\x93\x02\x46"A/v1p4beta1/{name=projects/*/locations/*/productSets/*}:addProduct:\x01*\x12\xc9\x01\n\x1bRemoveProductFromProductSet\x12\x41.google.cloud.vision.v1p4beta1.RemoveProductFromProductSetRequest\x1a\x16.google.protobuf.Empty"O\x82\xd3\xe4\x93\x02I"D/v1p4beta1/{name=projects/*/locations/*/productSets/*}:removeProdu
ct:\x01*\x12\xe4\x01\n\x18ListProductsInProductSet\x12>.google.cloud.vision.v1p4beta1.ListProductsInProductSetRequest\x1a?.google.cloud.vision.v1p4beta1.ListProductsInProductSetResponse"G\x82\xd3\xe4\x93\x02\x41\x12?/v1p4beta1/{name=projects/*/locations/*/productSets/*}/products\x12\xb5\x01\n\x11ImportProductSets\x12\x37.google.cloud.vision.v1p4beta1.ImportProductSetsRequest\x1a\x1d.google.longrunning.Operation"H\x82\xd3\xe4\x93\x02\x42"=/v1p4beta1/{parent=projects/*/locations/*}/productSets:import:\x01*B\x8f\x01\n!com.google.cloud.vision.v1p4beta1B\x19ProductSearchServiceProtoP\x01ZCgoogle.golang.org/genproto/googleapis/cloud/vision/v1p4beta1;vision\xf8\x01\x01\xa2\x02\x04GCVNb\x06proto3' + '\n@google/cloud/vision_v1p4beta1/proto/product_search_service.proto\x12\x1dgoogle.cloud.vision.v1p4beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\x32google/cloud/vision_v1p4beta1/proto/geometry.proto\x1a#google/longrunning/operations.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto"\xb2\x02\n\x07Product\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12\x1d\n\x10product_category\x18\x04 \x01(\tB\x03\xe0\x41\x05\x12G\n\x0eproduct_labels\x18\x05 \x03(\x0b\x32/.google.cloud.vision.v1p4beta1.Product.KeyValue\x1a&\n\x08KeyValue\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:^\xea\x41[\n\x1dvision.googleapis.com/Product\x12:projects/{project}/locations/{location}/products/{product}"\xfd\x01\n\nProductSet\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12\x33\n\nindex_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12,\n\x0bindex_error\x18\x04 \x01(\x0b\x32\x12.google.rpc.StatusB\x03\xe0\x41\x03:h\xea\x41\x65\n 
vision.googleapis.com/ProductSet\x12\x41projects/{project}/locations/{location}/productSets/{product_set}"\x85\x02\n\x0eReferenceImage\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x10\n\x03uri\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12H\n\x0e\x62ounding_polys\x18\x03 \x03(\x0b\x32+.google.cloud.vision.v1p4beta1.BoundingPolyB\x03\xe0\x41\x01:\x88\x01\xea\x41\x84\x01\n$vision.googleapis.com/ReferenceImage\x12\\projects/{project}/locations/{location}/products/{product}/referenceImages/{reference_image}"\xa3\x01\n\x14\x43reateProductRequest\x12\x39\n\x06parent\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!locations.googleapis.com/Location\x12<\n\x07product\x18\x02 \x01(\x0b\x32&.google.cloud.vision.v1p4beta1.ProductB\x03\xe0\x41\x02\x12\x12\n\nproduct_id\x18\x03 \x01(\t"w\n\x13ListProductsRequest\x12\x39\n\x06parent\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!locations.googleapis.com/Location\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"i\n\x14ListProductsResponse\x12\x38\n\x08products\x18\x01 \x03(\x0b\x32&.google.cloud.vision.v1p4beta1.Product\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"H\n\x11GetProductRequest\x12\x33\n\x04name\x18\x01 \x01(\tB%\xe0\x41\x02\xfa\x41\x1f\n\x1dvision.googleapis.com/Product"\x85\x01\n\x14UpdateProductRequest\x12<\n\x07product\x18\x01 \x01(\x0b\x32&.google.cloud.vision.v1p4beta1.ProductB\x03\xe0\x41\x02\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"K\n\x14\x44\x65leteProductRequest\x12\x33\n\x04name\x18\x01 \x01(\tB%\xe0\x41\x02\xfa\x41\x1f\n\x1dvision.googleapis.com/Product"\xb1\x01\n\x17\x43reateProductSetRequest\x12\x39\n\x06parent\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!locations.googleapis.com/Location\x12\x43\n\x0bproduct_set\x18\x02 \x01(\x0b\x32).google.cloud.vision.v1p4beta1.ProductSetB\x03\xe0\x41\x02\x12\x16\n\x0eproduct_set_id\x18\x03 \x01(\t"z\n\x16ListProductSetsRequest\x12\x39\n\x06parent\x18\x01 
\x01(\tB)\xe0\x41\x02\xfa\x41#\n!locations.googleapis.com/Location\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"s\n\x17ListProductSetsResponse\x12?\n\x0cproduct_sets\x18\x01 \x03(\x0b\x32).google.cloud.vision.v1p4beta1.ProductSet\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"N\n\x14GetProductSetRequest\x12\x36\n\x04name\x18\x01 \x01(\tB(\xe0\x41\x02\xfa\x41"\n vision.googleapis.com/ProductSet"\x8f\x01\n\x17UpdateProductSetRequest\x12\x43\n\x0bproduct_set\x18\x01 \x01(\x0b\x32).google.cloud.vision.v1p4beta1.ProductSetB\x03\xe0\x41\x02\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"Q\n\x17\x44\x65leteProductSetRequest\x12\x36\n\x04name\x18\x01 \x01(\tB(\xe0\x41\x02\xfa\x41"\n vision.googleapis.com/ProductSet"\xbd\x01\n\x1b\x43reateReferenceImageRequest\x12\x35\n\x06parent\x18\x01 \x01(\tB%\xe0\x41\x02\xfa\x41\x1f\n\x1dvision.googleapis.com/Product\x12K\n\x0freference_image\x18\x02 \x01(\x0b\x32-.google.cloud.vision.v1p4beta1.ReferenceImageB\x03\xe0\x41\x02\x12\x1a\n\x12reference_image_id\x18\x03 \x01(\t"z\n\x1aListReferenceImagesRequest\x12\x35\n\x06parent\x18\x01 \x01(\tB%\xe0\x41\x02\xfa\x41\x1f\n\x1dvision.googleapis.com/Product\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"\x92\x01\n\x1bListReferenceImagesResponse\x12G\n\x10reference_images\x18\x01 \x03(\x0b\x32-.google.cloud.vision.v1p4beta1.ReferenceImage\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x17\n\x0fnext_page_token\x18\x03 \x01(\t"V\n\x18GetReferenceImageRequest\x12:\n\x04name\x18\x01 \x01(\tB,\xe0\x41\x02\xfa\x41&\n$vision.googleapis.com/ReferenceImage"Y\n\x1b\x44\x65leteReferenceImageRequest\x12:\n\x04name\x18\x01 \x01(\tB,\xe0\x41\x02\xfa\x41&\n$vision.googleapis.com/ReferenceImage"\x8f\x01\n\x1d\x41\x64\x64ProductToProductSetRequest\x12\x36\n\x04name\x18\x01 \x01(\tB(\xe0\x41\x02\xfa\x41"\n vision.googleapis.com/ProductSet\x12\x36\n\x07product\x18\x02 
\x01(\tB%\xe0\x41\x02\xfa\x41\x1f\n\x1dvision.googleapis.com/Product"\x94\x01\n"RemoveProductFromProductSetRequest\x12\x36\n\x04name\x18\x01 \x01(\tB(\xe0\x41\x02\xfa\x41"\n vision.googleapis.com/ProductSet\x12\x36\n\x07product\x18\x02 \x01(\tB%\xe0\x41\x02\xfa\x41\x1f\n\x1dvision.googleapis.com/Product"\x80\x01\n\x1fListProductsInProductSetRequest\x12\x36\n\x04name\x18\x01 \x01(\tB(\xe0\x41\x02\xfa\x41"\n vision.googleapis.com/ProductSet\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"u\n ListProductsInProductSetResponse\x12\x38\n\x08products\x18\x01 \x03(\x0b\x32&.google.cloud.vision.v1p4beta1.Product\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"2\n\x1aImportProductSetsGcsSource\x12\x14\n\x0c\x63sv_file_uri\x18\x01 \x01(\t"y\n\x1cImportProductSetsInputConfig\x12O\n\ngcs_source\x18\x01 \x01(\x0b\x32\x39.google.cloud.vision.v1p4beta1.ImportProductSetsGcsSourceH\x00\x42\x08\n\x06source"\xad\x01\n\x18ImportProductSetsRequest\x12\x39\n\x06parent\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!locations.googleapis.com/Location\x12V\n\x0cinput_config\x18\x02 \x01(\x0b\x32;.google.cloud.vision.v1p4beta1.ImportProductSetsInputConfigB\x03\xe0\x41\x02"\x8a\x01\n\x19ImportProductSetsResponse\x12G\n\x10reference_images\x18\x01 \x03(\x0b\x32-.google.cloud.vision.v1p4beta1.ReferenceImage\x12$\n\x08statuses\x18\x02 \x03(\x0b\x32\x12.google.rpc.Status"\x9e\x02\n\x16\x42\x61tchOperationMetadata\x12J\n\x05state\x18\x01 \x01(\x0e\x32;.google.cloud.vision.v1p4beta1.BatchOperationMetadata.State\x12/\n\x0bsubmit_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"Y\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0e\n\nPROCESSING\x10\x01\x12\x0e\n\nSUCCESSFUL\x10\x02\x12\n\n\x06\x46\x41ILED\x10\x03\x12\r\n\tCANCELLED\x10\x04"/\n\x15ProductSetPurgeConfig\x12\x16\n\x0eproduct_set_id\x18\x01 \x01(\t"\xe6\x01\n\x14PurgeProductsRequest\x12X\n\x18product_set_purge_config\x18\x02 
\x01(\x0b\x32\x34.google.cloud.vision.v1p4beta1.ProductSetPurgeConfigH\x00\x12 \n\x16\x64\x65lete_orphan_products\x18\x03 \x01(\x08H\x00\x12\x39\n\x06parent\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!locations.googleapis.com/Location\x12\r\n\x05\x66orce\x18\x04 \x01(\x08\x42\x08\n\x06target2\xd2 \n\rProductSearch\x12\xe6\x01\n\x10\x43reateProductSet\x12\x36.google.cloud.vision.v1p4beta1.CreateProductSetRequest\x1a).google.cloud.vision.v1p4beta1.ProductSet"o\x82\xd3\xe4\x93\x02\x45"6/v1p4beta1/{parent=projects/*/locations/*}/productSets:\x0bproduct_set\xda\x41!parent,product_set,product_set_id\x12\xc9\x01\n\x0fListProductSets\x12\x35.google.cloud.vision.v1p4beta1.ListProductSetsRequest\x1a\x36.google.cloud.vision.v1p4beta1.ListProductSetsResponse"G\x82\xd3\xe4\x93\x02\x38\x12\x36/v1p4beta1/{parent=projects/*/locations/*}/productSets\xda\x41\x06parent\x12\xb6\x01\n\rGetProductSet\x12\x33.google.cloud.vision.v1p4beta1.GetProductSetRequest\x1a).google.cloud.vision.v1p4beta1.ProductSet"E\x82\xd3\xe4\x93\x02\x38\x12\x36/v1p4beta1/{name=projects/*/locations/*/productSets/*}\xda\x41\x04name\x12\xe8\x01\n\x10UpdateProductSet\x12\x36.google.cloud.vision.v1p4beta1.UpdateProductSetRequest\x1a).google.cloud.vision.v1p4beta1.ProductSet"q\x82\xd3\xe4\x93\x02Q2B/v1p4beta1/{product_set.name=projects/*/locations/*/productSets/*}:\x0bproduct_set\xda\x41\x17product_set,update_mask\x12\xa9\x01\n\x10\x44\x65leteProductSet\x12\x36.google.cloud.vision.v1p4beta1.DeleteProductSetRequest\x1a\x16.google.protobuf.Empty"E\x82\xd3\xe4\x93\x02\x38*6/v1p4beta1/{name=projects/*/locations/*/productSets/*}\xda\x41\x04name\x12\xce\x01\n\rCreateProduct\x12\x33.google.cloud.vision.v1p4beta1.CreateProductRequest\x1a&.google.cloud.vision.v1p4beta1.Product"`\x82\xd3\xe4\x93\x02>"3/v1p4beta1/{parent=projects/*/locations/*}/products:\x07product\xda\x41\x19parent,product,product_id\x12\xbd\x01\n\x0cListProducts\x12\x32.google.cloud.vision.v1p4beta1.ListProductsRequest\x1a\x33.google.cloud.vision.v1p4beta1.ListPr
oductsResponse"D\x82\xd3\xe4\x93\x02\x35\x12\x33/v1p4beta1/{parent=projects/*/locations/*}/products\xda\x41\x06parent\x12\xaa\x01\n\nGetProduct\x12\x30.google.cloud.vision.v1p4beta1.GetProductRequest\x1a&.google.cloud.vision.v1p4beta1.Product"B\x82\xd3\xe4\x93\x02\x35\x12\x33/v1p4beta1/{name=projects/*/locations/*/products/*}\xda\x41\x04name\x12\xd0\x01\n\rUpdateProduct\x12\x33.google.cloud.vision.v1p4beta1.UpdateProductRequest\x1a&.google.cloud.vision.v1p4beta1.Product"b\x82\xd3\xe4\x93\x02\x46\x32;/v1p4beta1/{product.name=projects/*/locations/*/products/*}:\x07product\xda\x41\x13product,update_mask\x12\xa0\x01\n\rDeleteProduct\x12\x33.google.cloud.vision.v1p4beta1.DeleteProductRequest\x1a\x16.google.protobuf.Empty"B\x82\xd3\xe4\x93\x02\x35*3/v1p4beta1/{name=projects/*/locations/*/products/*}\xda\x41\x04name\x12\x8e\x02\n\x14\x43reateReferenceImage\x12:.google.cloud.vision.v1p4beta1.CreateReferenceImageRequest\x1a-.google.cloud.vision.v1p4beta1.ReferenceImage"\x8a\x01\x82\xd3\xe4\x93\x02X"E/v1p4beta1/{parent=projects/*/locations/*/products/*}/referenceImages:\x0freference_image\xda\x41)parent,reference_image,reference_image_id\x12\xc0\x01\n\x14\x44\x65leteReferenceImage\x12:.google.cloud.vision.v1p4beta1.DeleteReferenceImageRequest\x1a\x16.google.protobuf.Empty"T\x82\xd3\xe4\x93\x02G*E/v1p4beta1/{name=projects/*/locations/*/products/*/referenceImages/*}\xda\x41\x04name\x12\xe4\x01\n\x13ListReferenceImages\x12\x39.google.cloud.vision.v1p4beta1.ListReferenceImagesRequest\x1a:.google.cloud.vision.v1p4beta1.ListReferenceImagesResponse"V\x82\xd3\xe4\x93\x02G\x12\x45/v1p4beta1/{parent=projects/*/locations/*/products/*}/referenceImages\xda\x41\x06parent\x12\xd1\x01\n\x11GetReferenceImage\x12\x37.google.cloud.vision.v1p4beta1.GetReferenceImageRequest\x1a-.google.cloud.vision.v1p4beta1.ReferenceImage"T\x82\xd3\xe4\x93\x02G\x12\x45/v1p4beta1/{name=projects/*/locations/*/products/*/referenceImages/*}\xda\x41\x04name\x12\xcb\x01\n\x16\x41\x64\x64ProductToProductSet\x12<.google
.cloud.vision.v1p4beta1.AddProductToProductSetRequest\x1a\x16.google.protobuf.Empty"[\x82\xd3\xe4\x93\x02\x46"A/v1p4beta1/{name=projects/*/locations/*/productSets/*}:addProduct:\x01*\xda\x41\x0cname,product\x12\xd8\x01\n\x1bRemoveProductFromProductSet\x12\x41.google.cloud.vision.v1p4beta1.RemoveProductFromProductSetRequest\x1a\x16.google.protobuf.Empty"^\x82\xd3\xe4\x93\x02I"D/v1p4beta1/{name=projects/*/locations/*/productSets/*}:removeProduct:\x01*\xda\x41\x0cname,product\x12\xeb\x01\n\x18ListProductsInProductSet\x12>.google.cloud.vision.v1p4beta1.ListProductsInProductSetRequest\x1a?.google.cloud.vision.v1p4beta1.ListProductsInProductSetResponse"N\x82\xd3\xe4\x93\x02\x41\x12?/v1p4beta1/{name=projects/*/locations/*/productSets/*}/products\xda\x41\x04name\x12\x82\x02\n\x11ImportProductSets\x12\x37.google.cloud.vision.v1p4beta1.ImportProductSetsRequest\x1a\x1d.google.longrunning.Operation"\x94\x01\x82\xd3\xe4\x93\x02\x42"=/v1p4beta1/{parent=projects/*/locations/*}/productSets:import:\x01*\xda\x41\x13parent,input_config\xca\x41\x33\n\x19ImportProductSetsResponse\x12\x16\x42\x61tchOperationMetadata\x12\xe4\x01\n\rPurgeProducts\x12\x33.google.cloud.vision.v1p4beta1.PurgeProductsRequest\x1a\x1d.google.longrunning.Operation"\x7f\x82\xd3\xe4\x93\x02>"9/v1p4beta1/{parent=projects/*/locations/*}/products:purge:\x01*\xda\x41\x06parent\xca\x41/\n\x15google.protobuf.Empty\x12\x16\x42\x61tchOperationMetadata\x1av\xca\x41\x15vision.googleapis.com\xd2\x41[https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-visionB\x8f\x01\n!com.google.cloud.vision.v1p4beta1B\x19ProductSearchServiceProtoP\x01ZCgoogle.golang.org/genproto/googleapis/cloud/vision/v1p4beta1;vision\xf8\x01\x01\xa2\x02\x04GCVNb\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, + google_dot_api_dot_resource__pb2.DESCRIPTOR, 
google_dot_cloud_dot_vision__v1p4beta1_dot_proto_dot_geometry__pb2.DESCRIPTOR, google_dot_longrunning_dot_operations__pb2.DESCRIPTOR, google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, @@ -78,8 +84,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=3307, - serialized_end=3396, + serialized_start=4545, + serialized_end=4634, ) _sym_db.RegisterEnumDescriptor(_BATCHOPERATIONMETADATA_STATE) @@ -136,8 +142,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=507, - serialized_end=545, + serialized_start=597, + serialized_end=635, ) _PRODUCT = _descriptor.Descriptor( @@ -216,7 +222,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\005"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -241,13 +247,15 @@ extensions=[], nested_types=[_PRODUCT_KEYVALUE], enum_types=[], - serialized_options=None, + serialized_options=_b( + "\352A[\n\035vision.googleapis.com/Product\022:projects/{project}/locations/{location}/products/{product}" + ), is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=340, - serialized_end=545, + serialized_start=425, + serialized_end=731, ) @@ -309,7 +317,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -327,20 +335,22 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - serialized_options=None, + serialized_options=_b( + "\352Ae\n vision.googleapis.com/ProductSet\022Aprojects/{project}/locations/{location}/productSets/{product_set}" + ), is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=548, - serialized_end=685, + serialized_start=734, + serialized_end=987, ) @@ -384,7 +394,7 @@ 
containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -402,20 +412,22 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - serialized_options=None, + serialized_options=_b( + "\352A\204\001\n$vision.googleapis.com/ReferenceImage\022\\projects/{project}/locations/{location}/products/{product}/referenceImages/{reference_image}" + ), is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=687, - serialized_end=799, + serialized_start=990, + serialized_end=1251, ) @@ -441,7 +453,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A#\n!locations.googleapis.com/Location" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -459,7 +473,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -489,8 +503,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=801, - serialized_end=916, + serialized_start=1254, + serialized_end=1417, ) @@ -516,7 +530,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A#\n!locations.googleapis.com/Location" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -564,8 +580,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=918, - serialized_end=994, + serialized_start=1419, + serialized_end=1538, ) @@ -621,8 +637,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=996, - serialized_end=1101, + serialized_start=1540, + serialized_end=1645, ) @@ -648,7 +664,9 @@ 
containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A\037\n\035vision.googleapis.com/Product" + ), file=DESCRIPTOR, ) ], @@ -660,8 +678,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1103, - serialized_end=1136, + serialized_start=1647, + serialized_end=1719, ) @@ -687,7 +705,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -717,8 +735,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1139, - serialized_end=1267, + serialized_start=1722, + serialized_end=1855, ) @@ -744,7 +762,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A\037\n\035vision.googleapis.com/Product" + ), file=DESCRIPTOR, ) ], @@ -756,8 +776,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1269, - serialized_end=1305, + serialized_start=1857, + serialized_end=1932, ) @@ -783,7 +803,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A#\n!locations.googleapis.com/Location" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -801,7 +823,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -831,8 +853,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1308, - serialized_end=1437, + serialized_start=1935, + serialized_end=2112, ) @@ -858,7 +880,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A#\n!locations.googleapis.com/Location" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -906,8 +930,8 @@ 
syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1439, - serialized_end=1518, + serialized_start=2114, + serialized_end=2236, ) @@ -963,8 +987,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1520, - serialized_end=1635, + serialized_start=2238, + serialized_end=2353, ) @@ -990,7 +1014,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b('\340A\002\372A"\n vision.googleapis.com/ProductSet'), file=DESCRIPTOR, ) ], @@ -1002,8 +1026,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1637, - serialized_end=1673, + serialized_start=2355, + serialized_end=2433, ) @@ -1029,7 +1053,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1059,8 +1083,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1676, - serialized_end=1814, + serialized_start=2436, + serialized_end=2579, ) @@ -1086,7 +1110,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b('\340A\002\372A"\n vision.googleapis.com/ProductSet'), file=DESCRIPTOR, ) ], @@ -1098,8 +1122,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1816, - serialized_end=1855, + serialized_start=2581, + serialized_end=2662, ) @@ -1125,7 +1149,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A\037\n\035vision.googleapis.com/Product" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1143,7 +1169,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1173,8 +1199,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1858, - 
serialized_end=2003, + serialized_start=2665, + serialized_end=2854, ) @@ -1200,7 +1226,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A\037\n\035vision.googleapis.com/Product" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1248,8 +1276,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2005, - serialized_end=2088, + serialized_start=2856, + serialized_end=2978, ) @@ -1323,8 +1351,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2091, - serialized_end=2237, + serialized_start=2981, + serialized_end=3127, ) @@ -1350,7 +1378,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A&\n$vision.googleapis.com/ReferenceImage" + ), file=DESCRIPTOR, ) ], @@ -1362,8 +1392,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2239, - serialized_end=2279, + serialized_start=3129, + serialized_end=3215, ) @@ -1389,7 +1419,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A&\n$vision.googleapis.com/ReferenceImage" + ), file=DESCRIPTOR, ) ], @@ -1401,8 +1433,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2281, - serialized_end=2324, + serialized_start=3217, + serialized_end=3306, ) @@ -1428,7 +1460,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b('\340A\002\372A"\n vision.googleapis.com/ProductSet'), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1446,7 +1478,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A\037\n\035vision.googleapis.com/Product" + ), file=DESCRIPTOR, ), ], @@ -1458,8 +1492,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - 
serialized_start=2326, - serialized_end=2388, + serialized_start=3309, + serialized_end=3452, ) @@ -1485,7 +1519,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b('\340A\002\372A"\n vision.googleapis.com/ProductSet'), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1503,7 +1537,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A\037\n\035vision.googleapis.com/Product" + ), file=DESCRIPTOR, ), ], @@ -1515,8 +1551,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2390, - serialized_end=2457, + serialized_start=3455, + serialized_end=3603, ) @@ -1542,7 +1578,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b('\340A\002\372A"\n vision.googleapis.com/ProductSet'), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1590,8 +1626,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2459, - serialized_end=2545, + serialized_start=3606, + serialized_end=3734, ) @@ -1647,8 +1683,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2547, - serialized_end=2664, + serialized_start=3736, + serialized_end=3853, ) @@ -1686,8 +1722,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2666, - serialized_end=2716, + serialized_start=3855, + serialized_end=3905, ) @@ -1733,8 +1769,8 @@ fields=[], ) ], - serialized_start=2718, - serialized_end=2839, + serialized_start=3907, + serialized_end=4028, ) @@ -1760,7 +1796,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A#\n!locations.googleapis.com/Location" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1778,7 +1816,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + 
serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -1790,8 +1828,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2841, - serialized_end=2966, + serialized_start=4031, + serialized_end=4204, ) @@ -1847,8 +1885,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2969, - serialized_end=3107, + serialized_start=4207, + serialized_end=4345, ) @@ -1922,8 +1960,150 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3110, - serialized_end=3396, + serialized_start=4348, + serialized_end=4634, +) + + +_PRODUCTSETPURGECONFIG = _descriptor.Descriptor( + name="ProductSetPurgeConfig", + full_name="google.cloud.vision.v1p4beta1.ProductSetPurgeConfig", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="product_set_id", + full_name="google.cloud.vision.v1p4beta1.ProductSetPurgeConfig.product_set_id", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=4636, + serialized_end=4683, +) + + +_PURGEPRODUCTSREQUEST = _descriptor.Descriptor( + name="PurgeProductsRequest", + full_name="google.cloud.vision.v1p4beta1.PurgeProductsRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="product_set_purge_config", + full_name="google.cloud.vision.v1p4beta1.PurgeProductsRequest.product_set_purge_config", + index=0, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + 
is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="delete_orphan_products", + full_name="google.cloud.vision.v1p4beta1.PurgeProductsRequest.delete_orphan_products", + index=1, + number=3, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="parent", + full_name="google.cloud.vision.v1p4beta1.PurgeProductsRequest.parent", + index=2, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b( + "\340A\002\372A#\n!locations.googleapis.com/Location" + ), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="force", + full_name="google.cloud.vision.v1p4beta1.PurgeProductsRequest.force", + index=3, + number=4, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="target", + full_name="google.cloud.vision.v1p4beta1.PurgeProductsRequest.target", + index=0, + containing_type=None, + fields=[], + ) + ], + serialized_start=4686, + serialized_end=4916, ) _PRODUCT_KEYVALUE.containing_type = _PRODUCT @@ -1986,6 +2166,21 @@ "end_time" ].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP _BATCHOPERATIONMETADATA_STATE.containing_type = _BATCHOPERATIONMETADATA 
+_PURGEPRODUCTSREQUEST.fields_by_name[ + "product_set_purge_config" +].message_type = _PRODUCTSETPURGECONFIG +_PURGEPRODUCTSREQUEST.oneofs_by_name["target"].fields.append( + _PURGEPRODUCTSREQUEST.fields_by_name["product_set_purge_config"] +) +_PURGEPRODUCTSREQUEST.fields_by_name[ + "product_set_purge_config" +].containing_oneof = _PURGEPRODUCTSREQUEST.oneofs_by_name["target"] +_PURGEPRODUCTSREQUEST.oneofs_by_name["target"].fields.append( + _PURGEPRODUCTSREQUEST.fields_by_name["delete_orphan_products"] +) +_PURGEPRODUCTSREQUEST.fields_by_name[ + "delete_orphan_products" +].containing_oneof = _PURGEPRODUCTSREQUEST.oneofs_by_name["target"] DESCRIPTOR.message_types_by_name["Product"] = _PRODUCT DESCRIPTOR.message_types_by_name["ProductSet"] = _PRODUCTSET DESCRIPTOR.message_types_by_name["ReferenceImage"] = _REFERENCEIMAGE @@ -2037,6 +2232,8 @@ "ImportProductSetsResponse" ] = _IMPORTPRODUCTSETSRESPONSE DESCRIPTOR.message_types_by_name["BatchOperationMetadata"] = _BATCHOPERATIONMETADATA +DESCRIPTOR.message_types_by_name["ProductSetPurgeConfig"] = _PRODUCTSETPURGECONFIG +DESCRIPTOR.message_types_by_name["PurgeProductsRequest"] = _PURGEPRODUCTSREQUEST _sym_db.RegisterFileDescriptor(DESCRIPTOR) Product = _reflection.GeneratedProtocolMessageType( @@ -2080,15 +2277,11 @@ User-provided metadata to be stored with this product. Must be at most 4096 characters long. product_category: - The category for the product identified by the reference - image. This should be either "homegoods-v2", "apparel-v2", - "toys-v2", or "packagedgoods-v1". The legacy categories - "homegoods", "apparel", and "toys" are still supported but - will be deprecated. For new products, please use - "homegoods-v2", "apparel-v2", or "toys-v2" for better product - search accuracy. It is recommended to migrate existing - products to these categories as well. This field is - immutable. + Immutable. The category for the product identified by the + reference image. 
This should be either "homegoods-v2", + "apparel-v2", or "toys-v2". The legacy categories "homegoods", + "apparel", and "toys" are still supported, but these should + not be used for new products. product_labels: Key-value pairs that can be attached to a product. At query time, constraints can be specified based on the @@ -2096,7 +2289,10 @@ strings, e.g. "1199". Only strings with integer values can match a range-based restriction which is to be supported soon. Multiple values can be assigned to the same key. One product - may have up to 100 product\_labels. + may have up to 500 product\_labels. Notice that the total + number of distinct product\_labels over all products in one + ProductSet cannot exceed 1M, otherwise the product search + pipeline will refuse to work for that ProductSet. """, # @@protoc_insertion_point(class_scope:google.cloud.vision.v1p4beta1.Product) ), @@ -2156,12 +2352,12 @@ mages/IMAGE_ID``. This field is ignored when creating a reference image. uri: - The Google Cloud Storage URI of the reference image. The URI - must start with ``gs://``. Required. + Required. The Google Cloud Storage URI of the reference image. + The URI must start with ``gs://``. bounding_polys: - Bounding polygons around the areas of interest in the - reference image. Optional. If this field is empty, the system - will try to detect regions of interest. At most 10 bounding + Optional. Bounding polygons around the areas of interest in + the reference image. If this field is empty, the system will + try to detect regions of interest. At most 10 bounding polygons will be used. The provided shape is converted into a non-rotated rectangle. Once converted, the small edge of the rectangle must be greater than or equal to 300 pixels. The @@ -2183,10 +2379,10 @@ Attributes: parent: - The project in which the Product should be created. Format is - ``projects/PROJECT_ID/locations/LOC_ID``. + Required. The project in which the Product should be created. 
+ Format is ``projects/PROJECT_ID/locations/LOC_ID``. product: - The product to create. + Required. The product to create. product_id: A user-supplied resource id for this Product. If set, the server will attempt to use this value as the resource id. If @@ -2210,8 +2406,8 @@ Attributes: parent: - The project OR ProductSet from which Products should be - listed. Format: ``projects/PROJECT_ID/locations/LOC_ID`` + Required. The project OR ProductSet from which Products should + be listed. Format: ``projects/PROJECT_ID/locations/LOC_ID`` page_size: The maximum number of items to return. Default 10, maximum 100. @@ -2256,7 +2452,7 @@ Attributes: name: - Resource name of the Product to get. Format is: + Required. Resource name of the Product to get. Format is: ``projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID`` """, # @@protoc_insertion_point(class_scope:google.cloud.vision.v1p4beta1.GetProductRequest) @@ -2275,8 +2471,8 @@ Attributes: product: - The Product resource which replaces the one on the server. - product.name is immutable. + Required. The Product resource which replaces the one on the + server. product.name is immutable. update_mask: The [FieldMask][google.protobuf.FieldMask] that specifies which fields to update. If update\_mask isn't specified, all @@ -2299,7 +2495,7 @@ Attributes: name: - Resource name of product to delete. Format is: + Required. Resource name of product to delete. Format is: ``projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID`` """, # @@protoc_insertion_point(class_scope:google.cloud.vision.v1p4beta1.DeleteProductRequest) @@ -2318,10 +2514,10 @@ Attributes: parent: - The project in which the ProductSet should be created. Format - is ``projects/PROJECT_ID/locations/LOC_ID``. + Required. The project in which the ProductSet should be + created. Format is ``projects/PROJECT_ID/locations/LOC_ID``. product_set: - The ProductSet to create. + Required. The ProductSet to create. 
product_set_id: A user-supplied resource id for this ProductSet. If set, the server will attempt to use this value as the resource id. If @@ -2345,8 +2541,8 @@ Attributes: parent: - The project from which ProductSets should be listed. Format - is ``projects/PROJECT_ID/locations/LOC_ID``. + Required. The project from which ProductSets should be listed. + Format is ``projects/PROJECT_ID/locations/LOC_ID``. page_size: The maximum number of items to return. Default 10, maximum 100. @@ -2391,8 +2587,9 @@ Attributes: name: - Resource name of the ProductSet to get. Format is: ``projects - /PROJECT_ID/locations/LOG_ID/productSets/PRODUCT_SET_ID`` + Required. Resource name of the ProductSet to get. Format is: + ``projects/PROJECT_ID/locations/LOG_ID/productSets/PRODUCT_SET + _ID`` """, # @@protoc_insertion_point(class_scope:google.cloud.vision.v1p4beta1.GetProductSetRequest) ), @@ -2410,7 +2607,8 @@ Attributes: product_set: - The ProductSet resource which replaces the one on the server. + Required. The ProductSet resource which replaces the one on + the server. update_mask: The [FieldMask][google.protobuf.FieldMask] that specifies which fields to update. If update\_mask isn't specified, all @@ -2433,8 +2631,9 @@ Attributes: name: - Resource name of the ProductSet to delete. Format is: ``proje - cts/PROJECT_ID/locations/LOC_ID/productSets/PRODUCT_SET_ID`` + Required. Resource name of the ProductSet to delete. Format + is: ``projects/PROJECT_ID/locations/LOC_ID/productSets/PRODUCT + _SET_ID`` """, # @@protoc_insertion_point(class_scope:google.cloud.vision.v1p4beta1.DeleteProductSetRequest) ), @@ -2452,12 +2651,12 @@ Attributes: parent: - Resource name of the product in which to create the reference - image. Format is + Required. Resource name of the product in which to create the + reference image. Format is ``projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID``. reference_image: - The reference image to create. If an image ID is specified, it - is ignored. + Required. 
The reference image to create. If an image ID is + specified, it is ignored. reference_image_id: A user-supplied resource id for the ReferenceImage to be added. If set, the server will attempt to use this value as @@ -2481,8 +2680,8 @@ Attributes: parent: - Resource name of the product containing the reference images. - Format is + Required. Resource name of the product containing the + reference images. Format is ``projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID``. page_size: The maximum number of items to return. Default 10, maximum @@ -2533,9 +2732,9 @@ Attributes: name: - The resource name of the ReferenceImage to get. Format is: ` - `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID/refe - renceImages/IMAGE_ID``. + Required. The resource name of the ReferenceImage to get. + Format is: ``projects/PROJECT_ID/locations/LOC_ID/products/PR + ODUCT_ID/referenceImages/IMAGE_ID``. """, # @@protoc_insertion_point(class_scope:google.cloud.vision.v1p4beta1.GetReferenceImageRequest) ), @@ -2553,9 +2752,9 @@ Attributes: name: - The resource name of the reference image to delete. Format - is: ``projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_I - D/referenceImages/IMAGE_ID`` + Required. The resource name of the reference image to delete. + Format is: ``projects/PROJECT_ID/locations/LOC_ID/products/PR + ODUCT_ID/referenceImages/IMAGE_ID`` """, # @@protoc_insertion_point(class_scope:google.cloud.vision.v1p4beta1.DeleteReferenceImageRequest) ), @@ -2573,12 +2772,12 @@ Attributes: name: - The resource name for the ProductSet to modify. Format is: `` - projects/PROJECT_ID/locations/LOC_ID/productSets/PRODUCT_SET_I - D`` + Required. The resource name for the ProductSet to modify. + Format is: ``projects/PROJECT_ID/locations/LOC_ID/productSets/ + PRODUCT_SET_ID`` product: - The resource name for the Product to be added to this - ProductSet. Format is: + Required. The resource name for the Product to be added to + this ProductSet. 
Format is: ``projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID`` """, # @@protoc_insertion_point(class_scope:google.cloud.vision.v1p4beta1.AddProductToProductSetRequest) @@ -2597,12 +2796,12 @@ Attributes: name: - The resource name for the ProductSet to modify. Format is: `` - projects/PROJECT_ID/locations/LOC_ID/productSets/PRODUCT_SET_I - D`` + Required. The resource name for the ProductSet to modify. + Format is: ``projects/PROJECT_ID/locations/LOC_ID/productSets/ + PRODUCT_SET_ID`` product: - The resource name for the Product to be removed from this - ProductSet. Format is: + Required. The resource name for the Product to be removed from + this ProductSet. Format is: ``projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID`` """, # @@protoc_insertion_point(class_scope:google.cloud.vision.v1p4beta1.RemoveProductFromProductSetRequest) @@ -2621,9 +2820,9 @@ Attributes: name: - The ProductSet resource for which to retrieve Products. - Format is: ``projects/PROJECT_ID/locations/LOC_ID/productSets/ - PRODUCT_SET_ID`` + Required. The ProductSet resource for which to retrieve + Products. Format is: ``projects/PROJECT_ID/locations/LOC_ID/p + roductSets/PRODUCT_SET_ID`` page_size: The maximum number of items to return. Default 10, maximum 100. @@ -2753,10 +2952,10 @@ Attributes: parent: - The project in which the ProductSets should be imported. - Format is ``projects/PROJECT_ID/locations/LOC_ID``. + Required. The project in which the ProductSets should be + imported. Format is ``projects/PROJECT_ID/locations/LOC_ID``. input_config: - The input content for the list of requests. + Required. The input content for the list of requests. 
""", # @@protoc_insertion_point(class_scope:google.cloud.vision.v1p4beta1.ImportProductSetsRequest) ), @@ -2821,17 +3020,102 @@ ) _sym_db.RegisterMessage(BatchOperationMetadata) +ProductSetPurgeConfig = _reflection.GeneratedProtocolMessageType( + "ProductSetPurgeConfig", + (_message.Message,), + dict( + DESCRIPTOR=_PRODUCTSETPURGECONFIG, + __module__="google.cloud.vision_v1p4beta1.proto.product_search_service_pb2", + __doc__="""Config to control which ProductSet contains the Products to be deleted. + + + Attributes: + product_set_id: + The ProductSet that contains the Products to delete. If a + Product is a member of product\_set\_id in addition to other + ProductSets, the Product will still be deleted. + """, + # @@protoc_insertion_point(class_scope:google.cloud.vision.v1p4beta1.ProductSetPurgeConfig) + ), +) +_sym_db.RegisterMessage(ProductSetPurgeConfig) + +PurgeProductsRequest = _reflection.GeneratedProtocolMessageType( + "PurgeProductsRequest", + (_message.Message,), + dict( + DESCRIPTOR=_PURGEPRODUCTSREQUEST, + __module__="google.cloud.vision_v1p4beta1.proto.product_search_service_pb2", + __doc__="""Request message for the ``PurgeProducts`` method. + + + Attributes: + target: + The Products to delete. + product_set_purge_config: + Specify which ProductSet contains the Products to be deleted. + delete_orphan_products: + If delete\_orphan\_products is true, all Products that are not + in any ProductSet will be deleted. + parent: + Required. The project and location in which the Products + should be deleted. Format is + ``projects/PROJECT_ID/locations/LOC_ID``. + force: + The default value is false. Override this value to true to + actually perform the purge. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.vision.v1p4beta1.PurgeProductsRequest) + ), +) +_sym_db.RegisterMessage(PurgeProductsRequest) + DESCRIPTOR._options = None +_PRODUCT.fields_by_name["product_category"]._options = None +_PRODUCT._options = None +_PRODUCTSET.fields_by_name["index_time"]._options = None +_PRODUCTSET.fields_by_name["index_error"]._options = None +_PRODUCTSET._options = None +_REFERENCEIMAGE.fields_by_name["uri"]._options = None +_REFERENCEIMAGE.fields_by_name["bounding_polys"]._options = None +_REFERENCEIMAGE._options = None +_CREATEPRODUCTREQUEST.fields_by_name["parent"]._options = None +_CREATEPRODUCTREQUEST.fields_by_name["product"]._options = None +_LISTPRODUCTSREQUEST.fields_by_name["parent"]._options = None +_GETPRODUCTREQUEST.fields_by_name["name"]._options = None +_UPDATEPRODUCTREQUEST.fields_by_name["product"]._options = None +_DELETEPRODUCTREQUEST.fields_by_name["name"]._options = None +_CREATEPRODUCTSETREQUEST.fields_by_name["parent"]._options = None +_CREATEPRODUCTSETREQUEST.fields_by_name["product_set"]._options = None +_LISTPRODUCTSETSREQUEST.fields_by_name["parent"]._options = None +_GETPRODUCTSETREQUEST.fields_by_name["name"]._options = None +_UPDATEPRODUCTSETREQUEST.fields_by_name["product_set"]._options = None +_DELETEPRODUCTSETREQUEST.fields_by_name["name"]._options = None +_CREATEREFERENCEIMAGEREQUEST.fields_by_name["parent"]._options = None +_CREATEREFERENCEIMAGEREQUEST.fields_by_name["reference_image"]._options = None +_LISTREFERENCEIMAGESREQUEST.fields_by_name["parent"]._options = None +_GETREFERENCEIMAGEREQUEST.fields_by_name["name"]._options = None +_DELETEREFERENCEIMAGEREQUEST.fields_by_name["name"]._options = None +_ADDPRODUCTTOPRODUCTSETREQUEST.fields_by_name["name"]._options = None +_ADDPRODUCTTOPRODUCTSETREQUEST.fields_by_name["product"]._options = None +_REMOVEPRODUCTFROMPRODUCTSETREQUEST.fields_by_name["name"]._options = None 
+_REMOVEPRODUCTFROMPRODUCTSETREQUEST.fields_by_name["product"]._options = None +_LISTPRODUCTSINPRODUCTSETREQUEST.fields_by_name["name"]._options = None +_IMPORTPRODUCTSETSREQUEST.fields_by_name["parent"]._options = None +_IMPORTPRODUCTSETSREQUEST.fields_by_name["input_config"]._options = None +_PURGEPRODUCTSREQUEST.fields_by_name["parent"]._options = None _PRODUCTSEARCH = _descriptor.ServiceDescriptor( name="ProductSearch", full_name="google.cloud.vision.v1p4beta1.ProductSearch", file=DESCRIPTOR, index=0, - serialized_options=None, - serialized_start=3399, - serialized_end=6886, + serialized_options=_b( + "\312A\025vision.googleapis.com\322A[https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-vision" + ), + serialized_start=4919, + serialized_end=9097, methods=[ _descriptor.MethodDescriptor( name="CreateProductSet", @@ -2841,7 +3125,7 @@ input_type=_CREATEPRODUCTSETREQUEST, output_type=_PRODUCTSET, serialized_options=_b( - '\202\323\344\223\002E"6/v1p4beta1/{parent=projects/*/locations/*}/productSets:\013product_set' + '\202\323\344\223\002E"6/v1p4beta1/{parent=projects/*/locations/*}/productSets:\013product_set\332A!parent,product_set,product_set_id' ), ), _descriptor.MethodDescriptor( @@ -2852,7 +3136,7 @@ input_type=_LISTPRODUCTSETSREQUEST, output_type=_LISTPRODUCTSETSRESPONSE, serialized_options=_b( - "\202\323\344\223\0028\0226/v1p4beta1/{parent=projects/*/locations/*}/productSets" + "\202\323\344\223\0028\0226/v1p4beta1/{parent=projects/*/locations/*}/productSets\332A\006parent" ), ), _descriptor.MethodDescriptor( @@ -2863,7 +3147,7 @@ input_type=_GETPRODUCTSETREQUEST, output_type=_PRODUCTSET, serialized_options=_b( - "\202\323\344\223\0028\0226/v1p4beta1/{name=projects/*/locations/*/productSets/*}" + "\202\323\344\223\0028\0226/v1p4beta1/{name=projects/*/locations/*/productSets/*}\332A\004name" ), ), _descriptor.MethodDescriptor( @@ -2874,7 +3158,7 @@ input_type=_UPDATEPRODUCTSETREQUEST, output_type=_PRODUCTSET, 
serialized_options=_b( - "\202\323\344\223\002Q2B/v1p4beta1/{product_set.name=projects/*/locations/*/productSets/*}:\013product_set" + "\202\323\344\223\002Q2B/v1p4beta1/{product_set.name=projects/*/locations/*/productSets/*}:\013product_set\332A\027product_set,update_mask" ), ), _descriptor.MethodDescriptor( @@ -2885,7 +3169,7 @@ input_type=_DELETEPRODUCTSETREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, serialized_options=_b( - "\202\323\344\223\0028*6/v1p4beta1/{name=projects/*/locations/*/productSets/*}" + "\202\323\344\223\0028*6/v1p4beta1/{name=projects/*/locations/*/productSets/*}\332A\004name" ), ), _descriptor.MethodDescriptor( @@ -2896,7 +3180,7 @@ input_type=_CREATEPRODUCTREQUEST, output_type=_PRODUCT, serialized_options=_b( - '\202\323\344\223\002>"3/v1p4beta1/{parent=projects/*/locations/*}/products:\007product' + '\202\323\344\223\002>"3/v1p4beta1/{parent=projects/*/locations/*}/products:\007product\332A\031parent,product,product_id' ), ), _descriptor.MethodDescriptor( @@ -2907,7 +3191,7 @@ input_type=_LISTPRODUCTSREQUEST, output_type=_LISTPRODUCTSRESPONSE, serialized_options=_b( - "\202\323\344\223\0025\0223/v1p4beta1/{parent=projects/*/locations/*}/products" + "\202\323\344\223\0025\0223/v1p4beta1/{parent=projects/*/locations/*}/products\332A\006parent" ), ), _descriptor.MethodDescriptor( @@ -2918,7 +3202,7 @@ input_type=_GETPRODUCTREQUEST, output_type=_PRODUCT, serialized_options=_b( - "\202\323\344\223\0025\0223/v1p4beta1/{name=projects/*/locations/*/products/*}" + "\202\323\344\223\0025\0223/v1p4beta1/{name=projects/*/locations/*/products/*}\332A\004name" ), ), _descriptor.MethodDescriptor( @@ -2929,7 +3213,7 @@ input_type=_UPDATEPRODUCTREQUEST, output_type=_PRODUCT, serialized_options=_b( - "\202\323\344\223\002F2;/v1p4beta1/{product.name=projects/*/locations/*/products/*}:\007product" + "\202\323\344\223\002F2;/v1p4beta1/{product.name=projects/*/locations/*/products/*}:\007product\332A\023product,update_mask" ), ), 
_descriptor.MethodDescriptor( @@ -2940,7 +3224,7 @@ input_type=_DELETEPRODUCTREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, serialized_options=_b( - "\202\323\344\223\0025*3/v1p4beta1/{name=projects/*/locations/*/products/*}" + "\202\323\344\223\0025*3/v1p4beta1/{name=projects/*/locations/*/products/*}\332A\004name" ), ), _descriptor.MethodDescriptor( @@ -2951,7 +3235,7 @@ input_type=_CREATEREFERENCEIMAGEREQUEST, output_type=_REFERENCEIMAGE, serialized_options=_b( - '\202\323\344\223\002X"E/v1p4beta1/{parent=projects/*/locations/*/products/*}/referenceImages:\017reference_image' + '\202\323\344\223\002X"E/v1p4beta1/{parent=projects/*/locations/*/products/*}/referenceImages:\017reference_image\332A)parent,reference_image,reference_image_id' ), ), _descriptor.MethodDescriptor( @@ -2962,7 +3246,7 @@ input_type=_DELETEREFERENCEIMAGEREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, serialized_options=_b( - "\202\323\344\223\002G*E/v1p4beta1/{name=projects/*/locations/*/products/*/referenceImages/*}" + "\202\323\344\223\002G*E/v1p4beta1/{name=projects/*/locations/*/products/*/referenceImages/*}\332A\004name" ), ), _descriptor.MethodDescriptor( @@ -2973,7 +3257,7 @@ input_type=_LISTREFERENCEIMAGESREQUEST, output_type=_LISTREFERENCEIMAGESRESPONSE, serialized_options=_b( - "\202\323\344\223\002G\022E/v1p4beta1/{parent=projects/*/locations/*/products/*}/referenceImages" + "\202\323\344\223\002G\022E/v1p4beta1/{parent=projects/*/locations/*/products/*}/referenceImages\332A\006parent" ), ), _descriptor.MethodDescriptor( @@ -2984,7 +3268,7 @@ input_type=_GETREFERENCEIMAGEREQUEST, output_type=_REFERENCEIMAGE, serialized_options=_b( - "\202\323\344\223\002G\022E/v1p4beta1/{name=projects/*/locations/*/products/*/referenceImages/*}" + "\202\323\344\223\002G\022E/v1p4beta1/{name=projects/*/locations/*/products/*/referenceImages/*}\332A\004name" ), ), _descriptor.MethodDescriptor( @@ -2995,7 +3279,7 @@ input_type=_ADDPRODUCTTOPRODUCTSETREQUEST, 
output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, serialized_options=_b( - '\202\323\344\223\002F"A/v1p4beta1/{name=projects/*/locations/*/productSets/*}:addProduct:\001*' + '\202\323\344\223\002F"A/v1p4beta1/{name=projects/*/locations/*/productSets/*}:addProduct:\001*\332A\014name,product' ), ), _descriptor.MethodDescriptor( @@ -3006,7 +3290,7 @@ input_type=_REMOVEPRODUCTFROMPRODUCTSETREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, serialized_options=_b( - '\202\323\344\223\002I"D/v1p4beta1/{name=projects/*/locations/*/productSets/*}:removeProduct:\001*' + '\202\323\344\223\002I"D/v1p4beta1/{name=projects/*/locations/*/productSets/*}:removeProduct:\001*\332A\014name,product' ), ), _descriptor.MethodDescriptor( @@ -3017,7 +3301,7 @@ input_type=_LISTPRODUCTSINPRODUCTSETREQUEST, output_type=_LISTPRODUCTSINPRODUCTSETRESPONSE, serialized_options=_b( - "\202\323\344\223\002A\022?/v1p4beta1/{name=projects/*/locations/*/productSets/*}/products" + "\202\323\344\223\002A\022?/v1p4beta1/{name=projects/*/locations/*/productSets/*}/products\332A\004name" ), ), _descriptor.MethodDescriptor( @@ -3028,7 +3312,18 @@ input_type=_IMPORTPRODUCTSETSREQUEST, output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, serialized_options=_b( - '\202\323\344\223\002B"=/v1p4beta1/{parent=projects/*/locations/*}/productSets:import:\001*' + '\202\323\344\223\002B"=/v1p4beta1/{parent=projects/*/locations/*}/productSets:import:\001*\332A\023parent,input_config\312A3\n\031ImportProductSetsResponse\022\026BatchOperationMetadata' + ), + ), + _descriptor.MethodDescriptor( + name="PurgeProducts", + full_name="google.cloud.vision.v1p4beta1.ProductSearch.PurgeProducts", + index=18, + containing_service=None, + input_type=_PURGEPRODUCTSREQUEST, + output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, + serialized_options=_b( + 
'\202\323\344\223\002>"9/v1p4beta1/{parent=projects/*/locations/*}/products:purge:\001*\332A\006parent\312A/\n\025google.protobuf.Empty\022\026BatchOperationMetadata' ), ), ], diff --git a/vision/google/cloud/vision_v1p4beta1/proto/product_search_service_pb2_grpc.py b/vision/google/cloud/vision_v1p4beta1/proto/product_search_service_pb2_grpc.py index de2ba5996cb5..0d1f9704cb5a 100644 --- a/vision/google/cloud/vision_v1p4beta1/proto/product_search_service_pb2_grpc.py +++ b/vision/google/cloud/vision_v1p4beta1/proto/product_search_service_pb2_grpc.py @@ -127,6 +127,11 @@ def __init__(self, channel): request_serializer=google_dot_cloud_dot_vision__v1p4beta1_dot_proto_dot_product__search__service__pb2.ImportProductSetsRequest.SerializeToString, response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, ) + self.PurgeProducts = channel.unary_unary( + "/google.cloud.vision.v1p4beta1.ProductSearch/PurgeProducts", + request_serializer=google_dot_cloud_dot_vision__v1p4beta1_dot_proto_dot_product__search__service__pb2.PurgeProductsRequest.SerializeToString, + response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, + ) class ProductSearchServicer(object): @@ -204,10 +209,6 @@ def DeleteProductSet(self, request, context): ProductSet are not deleted. The actual image files are not deleted from Google Cloud Storage. - - Possible errors: - - * Returns NOT_FOUND if the ProductSet does not exist. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") @@ -276,10 +277,6 @@ def DeleteProduct(self, request, context): Metadata of the product and all its images will be deleted right away, but search queries against ProductSets containing the product may still work until all related caches are refreshed. - - Possible errors: - - * Returns NOT_FOUND if the product does not exist. 
""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") @@ -318,10 +315,6 @@ def DeleteReferenceImage(self, request, context): caches are refreshed. The actual image files are not deleted from Google Cloud Storage. - - Possible errors: - - * Returns NOT_FOUND if the reference image does not exist. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") @@ -367,10 +360,6 @@ def AddProductToProductSet(self, request, context): def RemoveProductFromProductSet(self, request, context): """Removes a Product from the specified ProductSet. - - Possible errors: - - * Returns NOT_FOUND If the Product is not found under the ProductSet. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") @@ -406,6 +395,36 @@ def ImportProductSets(self, request, context): context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") + def PurgeProducts(self, request, context): + """Asynchronous API to delete all Products in a ProductSet or all Products + that are in no ProductSet. + + If a Product is a member of the specified ProductSet in addition to other + ProductSets, the Product will still be deleted. + + It is recommended to not delete the specified ProductSet until after this + operation has completed. It is also recommended to not add any of the + Products involved in the batch delete to a new ProductSet while this + operation is running because those Products may still end up deleted. + + It's not possible to undo the PurgeProducts operation. Therefore, it is + recommended to keep the csv files used in ImportProductSets (if that was + how you originally built the Product Set) before starting PurgeProducts, in + case you need to re-import the data after deletion. 
+ + If the plan is to purge all of the Products from a ProductSet and then + re-use the empty ProductSet to re-import new Products into the empty + ProductSet, you must wait until the PurgeProducts operation has finished + for that ProductSet. + + The [google.longrunning.Operation][google.longrunning.Operation] API can be + used to keep track of the progress and results of the request. + `Operation.metadata` contains `BatchOperationMetadata`. (progress) + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + def add_ProductSearchServicer_to_server(servicer, server): rpc_method_handlers = { @@ -499,6 +518,11 @@ def add_ProductSearchServicer_to_server(servicer, server): request_deserializer=google_dot_cloud_dot_vision__v1p4beta1_dot_proto_dot_product__search__service__pb2.ImportProductSetsRequest.FromString, response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, ), + "PurgeProducts": grpc.unary_unary_rpc_method_handler( + servicer.PurgeProducts, + request_deserializer=google_dot_cloud_dot_vision__v1p4beta1_dot_proto_dot_product__search__service__pb2.PurgeProductsRequest.FromString, + response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ), } generic_handler = grpc.method_handlers_generic_handler( "google.cloud.vision.v1p4beta1.ProductSearch", rpc_method_handlers diff --git a/vision/google/cloud/vision_v1p4beta1/proto/text_annotation.proto b/vision/google/cloud/vision_v1p4beta1/proto/text_annotation.proto index 542677f271e3..fbc35bb5458f 100644 --- a/vision/google/cloud/vision_v1p4beta1/proto/text_annotation.proto +++ b/vision/google/cloud/vision_v1p4beta1/proto/text_annotation.proto @@ -1,4 +1,4 @@ -// Copyright 2018 Google LLC. +// Copyright 2019 Google LLC. 
// // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -194,7 +194,7 @@ message Paragraph { // and the vertex order will still be (0, 1, 2, 3). BoundingPoly bounding_box = 2; - // List of words in this paragraph. + // List of all words in this paragraph. repeated Word words = 3; // Confidence of the OCR results for the paragraph. Range [0, 1]. @@ -250,7 +250,7 @@ message Symbol { // 2----3 // | | // 1----0 - // and the vertice order will still be (0, 1, 2, 3). + // and the vertex order will still be (0, 1, 2, 3). BoundingPoly bounding_box = 2; // The actual UTF-8 representation of the symbol. diff --git a/vision/google/cloud/vision_v1p4beta1/proto/text_annotation_pb2.py b/vision/google/cloud/vision_v1p4beta1/proto/text_annotation_pb2.py index 5eef841be026..435e1cfa0073 100644 --- a/vision/google/cloud/vision_v1p4beta1/proto/text_annotation_pb2.py +++ b/vision/google/cloud/vision_v1p4beta1/proto/text_annotation_pb2.py @@ -1058,7 +1058,7 @@ corner it becomes: 2----3 \| \| 1----0 and the vertex order will still be (0, 1, 2, 3). words: - List of words in this paragraph. + List of all words in this paragraph. confidence: Confidence of the OCR results for the paragraph. Range [0, 1]. """, @@ -1120,7 +1120,7 @@ text is read in the 'natural' orientation. For example: \* when the text is horizontal it might look like: 0----1 \| \| 3 ----2 \* when it's rotated 180 degrees around the top-left - corner it becomes: 2----3 \| \| 1----0 and the vertice order + corner it becomes: 2----3 \| \| 1----0 and the vertex order will still be (0, 1, 2, 3). text: The actual UTF-8 representation of the symbol. 
diff --git a/vision/google/cloud/vision_v1p4beta1/proto/web_detection.proto b/vision/google/cloud/vision_v1p4beta1/proto/web_detection.proto index 15822563b847..446a937f1c8e 100644 --- a/vision/google/cloud/vision_v1p4beta1/proto/web_detection.proto +++ b/vision/google/cloud/vision_v1p4beta1/proto/web_detection.proto @@ -1,4 +1,4 @@ -// Copyright 2018 Google LLC. +// Copyright 2019 Google LLC. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -50,17 +50,6 @@ message WebDetection { float score = 2; } - // Label to provide extra metadata for the web detection. - message WebLabel { - // Label for extra metadata. - string label = 1; - - // The BCP-47 language code for `label`, such as "en-US" or "sr-Latn". - // For more information, see - // http://www.unicode.org/reports/tr35/#Unicode_locale_identifier. - string language_code = 2; - } - // Metadata for web pages. message WebPage { // The result web page URL. @@ -83,6 +72,17 @@ message WebDetection { repeated WebImage partial_matching_images = 5; } + // Label to provide extra metadata for the web detection. + message WebLabel { + // Label for extra metadata. + string label = 1; + + // The BCP-47 language code for `label`, such as "en-US" or "sr-Latn". + // For more information, see + // http://www.unicode.org/reports/tr35/#Unicode_locale_identifier. + string language_code = 2; + } + // Deduced entities from similar images on the Internet. 
repeated WebEntity web_entities = 1; diff --git a/vision/google/cloud/vision_v1p4beta1/proto/web_detection_pb2.py b/vision/google/cloud/vision_v1p4beta1/proto/web_detection_pb2.py index 5a318c648b4d..b87762541a5a 100644 --- a/vision/google/cloud/vision_v1p4beta1/proto/web_detection_pb2.py +++ b/vision/google/cloud/vision_v1p4beta1/proto/web_detection_pb2.py @@ -26,7 +26,7 @@ "\n!com.google.cloud.vision.v1p4beta1B\021WebDetectionProtoP\001ZCgoogle.golang.org/genproto/googleapis/cloud/vision/v1p4beta1;vision\370\001\001\242\002\004GCVN" ), serialized_pb=_b( - '\n7google/cloud/vision_v1p4beta1/proto/web_detection.proto\x12\x1dgoogle.cloud.vision.v1p4beta1\x1a\x1cgoogle/api/annotations.proto"\x8c\x07\n\x0cWebDetection\x12K\n\x0cweb_entities\x18\x01 \x03(\x0b\x32\x35.google.cloud.vision.v1p4beta1.WebDetection.WebEntity\x12R\n\x14\x66ull_matching_images\x18\x02 \x03(\x0b\x32\x34.google.cloud.vision.v1p4beta1.WebDetection.WebImage\x12U\n\x17partial_matching_images\x18\x03 \x03(\x0b\x32\x34.google.cloud.vision.v1p4beta1.WebDetection.WebImage\x12W\n\x1apages_with_matching_images\x18\x04 \x03(\x0b\x32\x33.google.cloud.vision.v1p4beta1.WebDetection.WebPage\x12U\n\x17visually_similar_images\x18\x06 \x03(\x0b\x32\x34.google.cloud.vision.v1p4beta1.WebDetection.WebImage\x12O\n\x11\x62\x65st_guess_labels\x18\x08 \x03(\x0b\x32\x34.google.cloud.vision.v1p4beta1.WebDetection.WebLabel\x1a\x42\n\tWebEntity\x12\x11\n\tentity_id\x18\x01 \x01(\t\x12\r\n\x05score\x18\x02 \x01(\x02\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x1a&\n\x08WebImage\x12\x0b\n\x03url\x18\x01 \x01(\t\x12\r\n\x05score\x18\x02 \x01(\x02\x1a\x30\n\x08WebLabel\x12\r\n\x05label\x18\x01 \x01(\t\x12\x15\n\rlanguage_code\x18\x02 \x01(\t\x1a\xe4\x01\n\x07WebPage\x12\x0b\n\x03url\x18\x01 \x01(\t\x12\r\n\x05score\x18\x02 \x01(\x02\x12\x12\n\npage_title\x18\x03 \x01(\t\x12R\n\x14\x66ull_matching_images\x18\x04 \x03(\x0b\x32\x34.google.cloud.vision.v1p4beta1.WebDetection.WebImage\x12U\n\x17partial_matching_images\x18\x05 
\x03(\x0b\x32\x34.google.cloud.vision.v1p4beta1.WebDetection.WebImageB\x87\x01\n!com.google.cloud.vision.v1p4beta1B\x11WebDetectionProtoP\x01ZCgoogle.golang.org/genproto/googleapis/cloud/vision/v1p4beta1;vision\xf8\x01\x01\xa2\x02\x04GCVNb\x06proto3' + '\n7google/cloud/vision_v1p4beta1/proto/web_detection.proto\x12\x1dgoogle.cloud.vision.v1p4beta1\x1a\x1cgoogle/api/annotations.proto"\x8c\x07\n\x0cWebDetection\x12K\n\x0cweb_entities\x18\x01 \x03(\x0b\x32\x35.google.cloud.vision.v1p4beta1.WebDetection.WebEntity\x12R\n\x14\x66ull_matching_images\x18\x02 \x03(\x0b\x32\x34.google.cloud.vision.v1p4beta1.WebDetection.WebImage\x12U\n\x17partial_matching_images\x18\x03 \x03(\x0b\x32\x34.google.cloud.vision.v1p4beta1.WebDetection.WebImage\x12W\n\x1apages_with_matching_images\x18\x04 \x03(\x0b\x32\x33.google.cloud.vision.v1p4beta1.WebDetection.WebPage\x12U\n\x17visually_similar_images\x18\x06 \x03(\x0b\x32\x34.google.cloud.vision.v1p4beta1.WebDetection.WebImage\x12O\n\x11\x62\x65st_guess_labels\x18\x08 \x03(\x0b\x32\x34.google.cloud.vision.v1p4beta1.WebDetection.WebLabel\x1a\x42\n\tWebEntity\x12\x11\n\tentity_id\x18\x01 \x01(\t\x12\r\n\x05score\x18\x02 \x01(\x02\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x1a&\n\x08WebImage\x12\x0b\n\x03url\x18\x01 \x01(\t\x12\r\n\x05score\x18\x02 \x01(\x02\x1a\xe4\x01\n\x07WebPage\x12\x0b\n\x03url\x18\x01 \x01(\t\x12\r\n\x05score\x18\x02 \x01(\x02\x12\x12\n\npage_title\x18\x03 \x01(\t\x12R\n\x14\x66ull_matching_images\x18\x04 \x03(\x0b\x32\x34.google.cloud.vision.v1p4beta1.WebDetection.WebImage\x12U\n\x17partial_matching_images\x18\x05 \x03(\x0b\x32\x34.google.cloud.vision.v1p4beta1.WebDetection.WebImage\x1a\x30\n\x08WebLabel\x12\r\n\x05label\x18\x01 \x01(\t\x12\x15\n\rlanguage_code\x18\x02 \x01(\tB\x87\x01\n!com.google.cloud.vision.v1p4beta1B\x11WebDetectionProtoP\x01ZCgoogle.golang.org/genproto/googleapis/cloud/vision/v1p4beta1;vision\xf8\x01\x01\xa2\x02\x04GCVNb\x06proto3' ), 
dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR], ) @@ -162,62 +162,6 @@ serialized_end=748, ) -_WEBDETECTION_WEBLABEL = _descriptor.Descriptor( - name="WebLabel", - full_name="google.cloud.vision.v1p4beta1.WebDetection.WebLabel", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="label", - full_name="google.cloud.vision.v1p4beta1.WebDetection.WebLabel.label", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="language_code", - full_name="google.cloud.vision.v1p4beta1.WebDetection.WebLabel.language_code", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=750, - serialized_end=798, -) - _WEBDETECTION_WEBPAGE = _descriptor.Descriptor( name="WebPage", full_name="google.cloud.vision.v1p4beta1.WebDetection.WebPage", @@ -324,7 +268,63 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=801, + serialized_start=751, + serialized_end=979, +) + +_WEBDETECTION_WEBLABEL = _descriptor.Descriptor( + name="WebLabel", + full_name="google.cloud.vision.v1p4beta1.WebDetection.WebLabel", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="label", + full_name="google.cloud.vision.v1p4beta1.WebDetection.WebLabel.label", + index=0, + number=1, + type=9, + 
cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="language_code", + full_name="google.cloud.vision.v1p4beta1.WebDetection.WebLabel.language_code", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=981, serialized_end=1029, ) @@ -448,8 +448,8 @@ nested_types=[ _WEBDETECTION_WEBENTITY, _WEBDETECTION_WEBIMAGE, - _WEBDETECTION_WEBLABEL, _WEBDETECTION_WEBPAGE, + _WEBDETECTION_WEBLABEL, ], enum_types=[], serialized_options=None, @@ -463,7 +463,6 @@ _WEBDETECTION_WEBENTITY.containing_type = _WEBDETECTION _WEBDETECTION_WEBIMAGE.containing_type = _WEBDETECTION -_WEBDETECTION_WEBLABEL.containing_type = _WEBDETECTION _WEBDETECTION_WEBPAGE.fields_by_name[ "full_matching_images" ].message_type = _WEBDETECTION_WEBIMAGE @@ -471,6 +470,7 @@ "partial_matching_images" ].message_type = _WEBDETECTION_WEBIMAGE _WEBDETECTION_WEBPAGE.containing_type = _WEBDETECTION +_WEBDETECTION_WEBLABEL.containing_type = _WEBDETECTION _WEBDETECTION.fields_by_name["web_entities"].message_type = _WEBDETECTION_WEBENTITY _WEBDETECTION.fields_by_name[ "full_matching_images" @@ -531,26 +531,6 @@ # @@protoc_insertion_point(class_scope:google.cloud.vision.v1p4beta1.WebDetection.WebImage) ), ), - WebLabel=_reflection.GeneratedProtocolMessageType( - "WebLabel", - (_message.Message,), - dict( - DESCRIPTOR=_WEBDETECTION_WEBLABEL, - 
__module__="google.cloud.vision_v1p4beta1.proto.web_detection_pb2", - __doc__="""Label to provide extra metadata for the web detection. - - - Attributes: - label: - Label for extra metadata. - language_code: - The BCP-47 language code for ``label``, such as "en-US" or - "sr-Latn". For more information, see http://www.unicode.org/re - ports/tr35/#Unicode\_locale\_identifier. - """, - # @@protoc_insertion_point(class_scope:google.cloud.vision.v1p4beta1.WebDetection.WebLabel) - ), - ), WebPage=_reflection.GeneratedProtocolMessageType( "WebPage", (_message.Message,), @@ -579,6 +559,26 @@ # @@protoc_insertion_point(class_scope:google.cloud.vision.v1p4beta1.WebDetection.WebPage) ), ), + WebLabel=_reflection.GeneratedProtocolMessageType( + "WebLabel", + (_message.Message,), + dict( + DESCRIPTOR=_WEBDETECTION_WEBLABEL, + __module__="google.cloud.vision_v1p4beta1.proto.web_detection_pb2", + __doc__="""Label to provide extra metadata for the web detection. + + + Attributes: + label: + Label for extra metadata. + language_code: + The BCP-47 language code for ``label``, such as "en-US" or + "sr-Latn". For more information, see http://www.unicode.org/re + ports/tr35/#Unicode\_locale\_identifier. + """, + # @@protoc_insertion_point(class_scope:google.cloud.vision.v1p4beta1.WebDetection.WebLabel) + ), + ), DESCRIPTOR=_WEBDETECTION, __module__="google.cloud.vision_v1p4beta1.proto.web_detection_pb2", __doc__="""Relevant information for the image from the Internet. 
@@ -609,8 +609,8 @@ _sym_db.RegisterMessage(WebDetection) _sym_db.RegisterMessage(WebDetection.WebEntity) _sym_db.RegisterMessage(WebDetection.WebImage) -_sym_db.RegisterMessage(WebDetection.WebLabel) _sym_db.RegisterMessage(WebDetection.WebPage) +_sym_db.RegisterMessage(WebDetection.WebLabel) DESCRIPTOR._options = None diff --git a/vision/google/cloud/vision_v1p4beta1/types.py b/vision/google/cloud/vision_v1p4beta1/types.py index 6949fdf83948..c5d4ef429dd7 100644 --- a/vision/google/cloud/vision_v1p4beta1/types.py +++ b/vision/google/cloud/vision_v1p4beta1/types.py @@ -20,6 +20,7 @@ from google.api_core.protobuf_helpers import get_messages +from google.cloud.vision_v1p4beta1.proto import face_pb2 from google.cloud.vision_v1p4beta1.proto import geometry_pb2 from google.cloud.vision_v1p4beta1.proto import image_annotator_pb2 from google.cloud.vision_v1p4beta1.proto import product_search_pb2 @@ -50,6 +51,7 @@ ] _local_modules = [ + face_pb2, geometry_pb2, image_annotator_pb2, product_search_pb2, diff --git a/vision/synth.metadata b/vision/synth.metadata index e33b7b952bad..3e9809cfe9f0 100644 --- a/vision/synth.metadata +++ b/vision/synth.metadata @@ -1,26 +1,26 @@ { - "updateTime": "2019-08-16T12:45:12.445594Z", + "updateTime": "2019-11-06T13:44:16.072107Z", "sources": [ { "generator": { "name": "artman", - "version": "0.33.0", - "dockerImage": "googleapis/artman@sha256:c6231efb525569736226b1f7af7565dbc84248efafb3692a5bb1d2d8a7975d53" + "version": "0.41.0", + "dockerImage": "googleapis/artman@sha256:75b38a3b073a7b243545f2332463096624c802bb1e56b8cb6f22ba1ecd325fa9" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "2a02e33c79cbf23d316c57e1c78f915e1d905eee", - "internalRef": "263682410" + "sha": "5691fcb7c1a926b52577aa1834f31d9c50efda54", + "internalRef": "278731899" } }, { "template": { "name": "python_library", "origin": "synthtool.gcp", - "version": "2019.5.2" + "version": "2019.10.17" } } ], diff --git 
a/vision/tests/system.py b/vision/tests/system.py index ad34c47b6575..28c8a8251442 100644 --- a/vision/tests/system.py +++ b/vision/tests/system.py @@ -81,7 +81,7 @@ def test_detect_logos_content(self): # Check to ensure we got what we expect. assert len(response.logo_annotations) == 1 - assert response.logo_annotations[0].description == "google" + assert response.logo_annotations[0].description.lower() == "google" def test_detect_logos_file_handler(self): # Get a file handler, and make the request using it. @@ -90,7 +90,7 @@ def test_detect_logos_file_handler(self): # Check to ensure we got what we expect. assert len(response.logo_annotations) == 1 - assert response.logo_annotations[0].description == "google" + assert response.logo_annotations[0].description.lower() == "google" def test_detect_logos_filename(self): # Make the request with the filename directly. @@ -98,7 +98,7 @@ def test_detect_logos_filename(self): # Check to ensure we got what we expect. assert len(response.logo_annotations) == 1 - assert response.logo_annotations[0].description == "google" + assert response.logo_annotations[0].description.lower() == "google" def test_detect_logos_gcs(self): # Upload the image to Google Cloud Storage. @@ -121,7 +121,7 @@ def test_detect_logos_gcs(self): # Check the response. assert len(response.logo_annotations) == 1 - assert response.logo_annotations[0].description == "google" + assert response.logo_annotations[0].description.lower() == "google" def test_detect_logos_async(self): # Upload the image to Google Cloud Storage. 
@@ -177,7 +177,7 @@ def test_detect_logos_async(self): assert len(responses) == 1 logo_annotations = responses[0]["logoAnnotations"] assert len(logo_annotations) == 1 - assert logo_annotations[0]["description"] == "google" + assert logo_annotations[0]["description"].lower() == "google" class TestVisionClientFiles(VisionSystemTestBase): diff --git a/vision/tests/unit/gapic/v1p4beta1/test_product_search_client_v1p4beta1.py b/vision/tests/unit/gapic/v1p4beta1/test_product_search_client_v1p4beta1.py index 1846fe24822b..9f854f842b41 100644 --- a/vision/tests/unit/gapic/v1p4beta1/test_product_search_client_v1p4beta1.py +++ b/vision/tests/unit/gapic/v1p4beta1/test_product_search_client_v1p4beta1.py @@ -856,3 +856,36 @@ def test_import_product_sets_exception(self): response = client.import_product_sets(parent, input_config) exception = response.exception() assert exception.errors[0] == error + + def test_purge_products(self): + # Setup Expected Response + name = "name3373707" + done = True + expected_response = {"name": name, "done": done} + expected_response = operations_pb2.Operation(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = vision_v1p4beta1.ProductSearchClient() + + response = client.purge_products() + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = product_search_service_pb2.PurgeProductsRequest() + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_purge_products_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = vision_v1p4beta1.ProductSearchClient() + + with 
pytest.raises(CustomException): + client.purge_products() diff --git a/webrisk/docs/conf.py b/webrisk/docs/conf.py index 928f5b98c6bf..0b6fd1a76b85 100644 --- a/webrisk/docs/conf.py +++ b/webrisk/docs/conf.py @@ -264,7 +264,7 @@ u"google-cloud-webrisk Documentation", author, "manual", - ) + ), ] # The name of an image file (relative to this directory) to place at the top of @@ -320,7 +320,7 @@ "google-cloud-webrisk", "GAPIC library for the {metadata.shortName} v1beta1 service", "APIs", - ) + ), ] # Documents to append as an appendix to all manuals. @@ -344,7 +344,7 @@ "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://2.python-requests.org/en/master/", None), + "requests": ("https://requests.kennethreitz.org/en/master/", None), "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } diff --git a/webrisk/google/cloud/webrisk.py b/webrisk/google/cloud/webrisk.py index 6b9be6efb038..5ed7f8340456 100644 --- a/webrisk/google/cloud/webrisk.py +++ b/webrisk/google/cloud/webrisk.py @@ -22,4 +22,8 @@ from google.cloud.webrisk_v1beta1 import types -__all__ = ("enums", "types", "WebRiskServiceV1Beta1Client") +__all__ = ( + "enums", + "types", + "WebRiskServiceV1Beta1Client", +) diff --git a/webrisk/google/cloud/webrisk_v1beta1/__init__.py b/webrisk/google/cloud/webrisk_v1beta1/__init__.py index a02bc357affc..823fd3656c38 100644 --- a/webrisk/google/cloud/webrisk_v1beta1/__init__.py +++ b/webrisk/google/cloud/webrisk_v1beta1/__init__.py @@ -29,4 +29,8 @@ class WebRiskServiceV1Beta1Client( enums = enums -__all__ = ("enums", "types", "WebRiskServiceV1Beta1Client") +__all__ = ( + "enums", + "types", + "WebRiskServiceV1Beta1Client", +) diff --git 
a/webrisk/google/cloud/webrisk_v1beta1/gapic/transports/web_risk_service_v1_beta1_grpc_transport.py b/webrisk/google/cloud/webrisk_v1beta1/gapic/transports/web_risk_service_v1_beta1_grpc_transport.py index 773c0afc9601..8e78eafa37b0 100644 --- a/webrisk/google/cloud/webrisk_v1beta1/gapic/transports/web_risk_service_v1_beta1_grpc_transport.py +++ b/webrisk/google/cloud/webrisk_v1beta1/gapic/transports/web_risk_service_v1_beta1_grpc_transport.py @@ -53,7 +53,7 @@ def __init__( # exception (channels come with credentials baked in already). if channel is not None and credentials is not None: raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive." + "The `channel` and `credentials` arguments are mutually " "exclusive.", ) # Create the channel. @@ -74,7 +74,7 @@ def __init__( self._stubs = { "web_risk_service_v1_beta1_stub": webrisk_pb2_grpc.WebRiskServiceV1Beta1Stub( channel - ) + ), } @classmethod diff --git a/webrisk/google/cloud/webrisk_v1beta1/gapic/web_risk_service_v1_beta1_client.py b/webrisk/google/cloud/webrisk_v1beta1/gapic/web_risk_service_v1_beta1_client.py index 93cefba2b5d1..66e5179318f5 100644 --- a/webrisk/google/cloud/webrisk_v1beta1/gapic/web_risk_service_v1_beta1_client.py +++ b/webrisk/google/cloud/webrisk_v1beta1/gapic/web_risk_service_v1_beta1_client.py @@ -36,7 +36,7 @@ from google.cloud.webrisk_v1beta1.proto import webrisk_pb2_grpc -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-webrisk").version +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-webrisk",).version class WebRiskServiceV1Beta1Client(object): @@ -159,12 +159,12 @@ def __init__( self.transport = transport else: self.transport = web_risk_service_v1_beta1_grpc_transport.WebRiskServiceV1Beta1GrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials + address=api_endpoint, channel=channel, credentials=credentials, ) if client_info is None: client_info = 
google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION + gapic_version=_GAPIC_LIBRARY_VERSION, ) else: client_info.gapic_version = _GAPIC_LIBRARY_VERSION @@ -175,7 +175,7 @@ def __init__( # (Ordinarily, these are the defaults specified in the `*_config.py` # file next to this one.) self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME] + client_config["interfaces"][self._INTERFACE_NAME], ) # Save a dictionary of cached API call functions. @@ -212,8 +212,8 @@ def compute_threat_list_diff( >>> response = client.compute_threat_list_diff(threat_type, constraints) Args: - threat_type (~google.cloud.webrisk_v1beta1.types.ThreatType): Required. The ThreatList to update. - constraints (Union[dict, ~google.cloud.webrisk_v1beta1.types.Constraints]): The constraints associated with this request. + threat_type (~google.cloud.webrisk_v1beta1.types.ThreatType): The ThreatList to update. + constraints (Union[dict, ~google.cloud.webrisk_v1beta1.types.Constraints]): Required. The constraints associated with this request. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.webrisk_v1beta1.types.Constraints` @@ -284,7 +284,7 @@ def search_uris( >>> response = client.search_uris(uri, threat_types) Args: - uri (str): The URI to be checked for matches. + uri (str): Required. The URI to be checked for matches. threat_types (list[~google.cloud.webrisk_v1beta1.types.ThreatType]): Required. The ThreatLists to search in. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. 
If ``None`` is specified, requests will @@ -316,15 +316,15 @@ def search_uris( client_info=self._client_info, ) - request = webrisk_pb2.SearchUrisRequest(uri=uri, threat_types=threat_types) + request = webrisk_pb2.SearchUrisRequest(uri=uri, threat_types=threat_types,) return self._inner_api_calls["search_uris"]( request, retry=retry, timeout=timeout, metadata=metadata ) def search_hashes( self, + threat_types, hash_prefix=None, - threat_types=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, @@ -338,15 +338,19 @@ def search_hashes( Example: >>> from google.cloud import webrisk_v1beta1 + >>> from google.cloud.webrisk_v1beta1 import enums >>> >>> client = webrisk_v1beta1.WebRiskServiceV1Beta1Client() >>> - >>> response = client.search_hashes() + >>> # TODO: Initialize `threat_types`: + >>> threat_types = [] + >>> + >>> response = client.search_hashes(threat_types) Args: + threat_types (list[~google.cloud.webrisk_v1beta1.types.ThreatType]): Required. The ThreatLists to search in. hash_prefix (bytes): A hash prefix, consisting of the most significant 4-32 bytes of a SHA256 hash. For JSON requests, this field is base64-encoded. - threat_types (list[~google.cloud.webrisk_v1beta1.types.ThreatType]): Required. The ThreatLists to search in. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. 
@@ -378,7 +382,7 @@ def search_hashes( ) request = webrisk_pb2.SearchHashesRequest( - hash_prefix=hash_prefix, threat_types=threat_types + threat_types=threat_types, hash_prefix=hash_prefix, ) return self._inner_api_calls["search_hashes"]( request, retry=retry, timeout=timeout, metadata=metadata diff --git a/webrisk/google/cloud/webrisk_v1beta1/proto/webrisk.proto b/webrisk/google/cloud/webrisk_v1beta1/proto/webrisk.proto index c6b5f7c90ce8..7f8021d16b3e 100644 --- a/webrisk/google/cloud/webrisk_v1beta1/proto/webrisk.proto +++ b/webrisk/google/cloud/webrisk_v1beta1/proto/webrisk.proto @@ -18,6 +18,8 @@ syntax = "proto3"; package google.cloud.webrisk.v1beta1; import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; import "google/protobuf/timestamp.proto"; option csharp_namespace = "Google.Cloud.WebRisk.V1Beta1"; @@ -31,12 +33,15 @@ option php_namespace = "Google\\Cloud\\WebRisk\\V1beta1"; // Web Risk v1beta1 API defines an interface to detect malicious URLs on your // website and in client applications. service WebRiskServiceV1Beta1 { + option (google.api.default_host) = "webrisk.googleapis.com"; + option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; + // Gets the most recent threat list diffs. - rpc ComputeThreatListDiff(ComputeThreatListDiffRequest) - returns (ComputeThreatListDiffResponse) { + rpc ComputeThreatListDiff(ComputeThreatListDiffRequest) returns (ComputeThreatListDiffResponse) { option (google.api.http) = { get: "/v1beta1/threatLists:computeDiff" }; + option (google.api.method_signature) = "threat_type,version_token,constraints"; } // This method is used to check whether a URI is on a given threatList. @@ -44,6 +49,7 @@ service WebRiskServiceV1Beta1 { option (google.api.http) = { get: "/v1beta1/uris:search" }; + option (google.api.method_signature) = "uri,threat_types"; } // Gets the full hashes that match the requested hash prefix. 
@@ -55,6 +61,7 @@ service WebRiskServiceV1Beta1 { option (google.api.http) = { get: "/v1beta1/hashes:search" }; + option (google.api.method_signature) = "hash_prefix,threat_types"; } } @@ -76,15 +83,15 @@ message ComputeThreatListDiffRequest { repeated CompressionType supported_compressions = 3; } - // Required. The ThreatList to update. - ThreatType threat_type = 1; + // The ThreatList to update. + ThreatType threat_type = 1 [(google.api.field_behavior) = REQUIRED]; // The current version token of the client for the requested list (the // client version that was received from the last successful diff). bytes version_token = 2; - // The constraints associated with this request. - Constraints constraints = 3; + // Required. The constraints associated with this request. + Constraints constraints = 3 [(google.api.field_behavior) = REQUIRED]; } message ComputeThreatListDiffResponse { @@ -109,7 +116,7 @@ message ComputeThreatListDiffResponse { RESET = 2; } - // The type of response. This may indicate that an action is required by the + // The type of response. This may indicate that an action must be taken by the // client when the response is received. ResponseType response_type = 4; @@ -138,11 +145,11 @@ message ComputeThreatListDiffResponse { // Request to check URI entries against threatLists. message SearchUrisRequest { - // The URI to be checked for matches. - string uri = 1; + // Required. The URI to be checked for matches. + string uri = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The ThreatLists to search in. - repeated ThreatType threat_types = 2; + repeated ThreatType threat_types = 2 [(google.api.field_behavior) = REQUIRED]; } message SearchUrisResponse { @@ -167,7 +174,7 @@ message SearchHashesRequest { bytes hash_prefix = 1; // Required. The ThreatLists to search in. 
- repeated ThreatType threat_types = 2; + repeated ThreatType threat_types = 2 [(google.api.field_behavior) = REQUIRED]; } message SearchHashesResponse { diff --git a/webrisk/google/cloud/webrisk_v1beta1/proto/webrisk_pb2.py b/webrisk/google/cloud/webrisk_v1beta1/proto/webrisk_pb2.py index 7b61ac5aa53a..c4e7f392a921 100644 --- a/webrisk/google/cloud/webrisk_v1beta1/proto/webrisk_pb2.py +++ b/webrisk/google/cloud/webrisk_v1beta1/proto/webrisk_pb2.py @@ -17,6 +17,8 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 @@ -28,10 +30,12 @@ "\n\032com.google.webrisk.v1beta1B\014WebRiskProtoP\001ZCgoogle.golang.org/genproto/googleapis/cloud/webrisk/v1beta1;webrisk\242\002\004GCWR\252\002\034Google.Cloud.WebRisk.V1Beta1\312\002\034Google\\Cloud\\WebRisk\\V1beta1" ), serialized_pb=_b( - '\n0google/cloud/webrisk_v1beta1/proto/webrisk.proto\x12\x1cgoogle.cloud.webrisk.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xe8\x02\n\x1c\x43omputeThreatListDiffRequest\x12=\n\x0bthreat_type\x18\x01 \x01(\x0e\x32(.google.cloud.webrisk.v1beta1.ThreatType\x12\x15\n\rversion_token\x18\x02 \x01(\x0c\x12[\n\x0b\x63onstraints\x18\x03 \x01(\x0b\x32\x46.google.cloud.webrisk.v1beta1.ComputeThreatListDiffRequest.Constraints\x1a\x94\x01\n\x0b\x43onstraints\x12\x18\n\x10max_diff_entries\x18\x01 \x01(\x05\x12\x1c\n\x14max_database_entries\x18\x02 \x01(\x05\x12M\n\x16supported_compressions\x18\x03 \x03(\x0e\x32-.google.cloud.webrisk.v1beta1.CompressionType"\x9a\x04\n\x1d\x43omputeThreatListDiffResponse\x12_\n\rresponse_type\x18\x04 \x01(\x0e\x32H.google.cloud.webrisk.v1beta1.ComputeThreatListDiffResponse.ResponseType\x12\x45\n\tadditions\x18\x05 
\x01(\x0b\x32\x32.google.cloud.webrisk.v1beta1.ThreatEntryAdditions\x12\x43\n\x08removals\x18\x06 \x01(\x0b\x32\x31.google.cloud.webrisk.v1beta1.ThreatEntryRemovals\x12\x19\n\x11new_version_token\x18\x07 \x01(\x0c\x12V\n\x08\x63hecksum\x18\x08 \x01(\x0b\x32\x44.google.cloud.webrisk.v1beta1.ComputeThreatListDiffResponse.Checksum\x12\x39\n\x15recommended_next_diff\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x1a\x1a\n\x08\x43hecksum\x12\x0e\n\x06sha256\x18\x01 \x01(\x0c"B\n\x0cResponseType\x12\x1d\n\x19RESPONSE_TYPE_UNSPECIFIED\x10\x00\x12\x08\n\x04\x44IFF\x10\x01\x12\t\n\x05RESET\x10\x02"`\n\x11SearchUrisRequest\x12\x0b\n\x03uri\x18\x01 \x01(\t\x12>\n\x0cthreat_types\x18\x02 \x03(\x0e\x32(.google.cloud.webrisk.v1beta1.ThreatType"\xde\x01\n\x12SearchUrisResponse\x12J\n\x06threat\x18\x01 \x01(\x0b\x32:.google.cloud.webrisk.v1beta1.SearchUrisResponse.ThreatUri\x1a|\n\tThreatUri\x12>\n\x0cthreat_types\x18\x01 \x03(\x0e\x32(.google.cloud.webrisk.v1beta1.ThreatType\x12/\n\x0b\x65xpire_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"j\n\x13SearchHashesRequest\x12\x13\n\x0bhash_prefix\x18\x01 \x01(\x0c\x12>\n\x0cthreat_types\x18\x02 \x03(\x0e\x32(.google.cloud.webrisk.v1beta1.ThreatType"\xae\x02\n\x14SearchHashesResponse\x12N\n\x07threats\x18\x01 \x03(\x0b\x32=.google.cloud.webrisk.v1beta1.SearchHashesResponse.ThreatHash\x12\x38\n\x14negative_expire_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x1a\x8b\x01\n\nThreatHash\x12>\n\x0cthreat_types\x18\x01 \x03(\x0e\x32(.google.cloud.webrisk.v1beta1.ThreatType\x12\x0c\n\x04hash\x18\x02 \x01(\x0c\x12/\n\x0b\x65xpire_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"\x99\x01\n\x14ThreatEntryAdditions\x12;\n\nraw_hashes\x18\x01 \x03(\x0b\x32\'.google.cloud.webrisk.v1beta1.RawHashes\x12\x44\n\x0brice_hashes\x18\x02 \x01(\x0b\x32/.google.cloud.webrisk.v1beta1.RiceDeltaEncoding"\x9b\x01\n\x13ThreatEntryRemovals\x12=\n\x0braw_indices\x18\x01 
\x01(\x0b\x32(.google.cloud.webrisk.v1beta1.RawIndices\x12\x45\n\x0crice_indices\x18\x02 \x01(\x0b\x32/.google.cloud.webrisk.v1beta1.RiceDeltaEncoding"\x1d\n\nRawIndices\x12\x0f\n\x07indices\x18\x01 \x03(\x05"4\n\tRawHashes\x12\x13\n\x0bprefix_size\x18\x01 \x01(\x05\x12\x12\n\nraw_hashes\x18\x02 \x01(\x0c"k\n\x11RiceDeltaEncoding\x12\x13\n\x0b\x66irst_value\x18\x01 \x01(\x03\x12\x16\n\x0erice_parameter\x18\x02 \x01(\x05\x12\x13\n\x0b\x65ntry_count\x18\x03 \x01(\x05\x12\x14\n\x0c\x65ncoded_data\x18\x04 \x01(\x0c*e\n\nThreatType\x12\x1b\n\x17THREAT_TYPE_UNSPECIFIED\x10\x00\x12\x0b\n\x07MALWARE\x10\x01\x12\x16\n\x12SOCIAL_ENGINEERING\x10\x02\x12\x15\n\x11UNWANTED_SOFTWARE\x10\x03*F\n\x0f\x43ompressionType\x12 \n\x1c\x43OMPRESSION_TYPE_UNSPECIFIED\x10\x00\x12\x07\n\x03RAW\x10\x01\x12\x08\n\x04RICE\x10\x02\x32\xfc\x03\n\x15WebRiskServiceV1Beta1\x12\xba\x01\n\x15\x43omputeThreatListDiff\x12:.google.cloud.webrisk.v1beta1.ComputeThreatListDiffRequest\x1a;.google.cloud.webrisk.v1beta1.ComputeThreatListDiffResponse"(\x82\xd3\xe4\x93\x02"\x12 /v1beta1/threatLists:computeDiff\x12\x8d\x01\n\nSearchUris\x12/.google.cloud.webrisk.v1beta1.SearchUrisRequest\x1a\x30.google.cloud.webrisk.v1beta1.SearchUrisResponse"\x1c\x82\xd3\xe4\x93\x02\x16\x12\x14/v1beta1/uris:search\x12\x95\x01\n\x0cSearchHashes\x12\x31.google.cloud.webrisk.v1beta1.SearchHashesRequest\x1a\x32.google.cloud.webrisk.v1beta1.SearchHashesResponse"\x1e\x82\xd3\xe4\x93\x02\x18\x12\x16/v1beta1/hashes:searchB\xb6\x01\n\x1a\x63om.google.webrisk.v1beta1B\x0cWebRiskProtoP\x01ZCgoogle.golang.org/genproto/googleapis/cloud/webrisk/v1beta1;webrisk\xa2\x02\x04GCWR\xaa\x02\x1cGoogle.Cloud.WebRisk.V1Beta1\xca\x02\x1cGoogle\\Cloud\\WebRisk\\V1beta1b\x06proto3' + 
'\n0google/cloud/webrisk_v1beta1/proto/webrisk.proto\x12\x1cgoogle.cloud.webrisk.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xf2\x02\n\x1c\x43omputeThreatListDiffRequest\x12\x42\n\x0bthreat_type\x18\x01 \x01(\x0e\x32(.google.cloud.webrisk.v1beta1.ThreatTypeB\x03\xe0\x41\x02\x12\x15\n\rversion_token\x18\x02 \x01(\x0c\x12`\n\x0b\x63onstraints\x18\x03 \x01(\x0b\x32\x46.google.cloud.webrisk.v1beta1.ComputeThreatListDiffRequest.ConstraintsB\x03\xe0\x41\x02\x1a\x94\x01\n\x0b\x43onstraints\x12\x18\n\x10max_diff_entries\x18\x01 \x01(\x05\x12\x1c\n\x14max_database_entries\x18\x02 \x01(\x05\x12M\n\x16supported_compressions\x18\x03 \x03(\x0e\x32-.google.cloud.webrisk.v1beta1.CompressionType"\x9a\x04\n\x1d\x43omputeThreatListDiffResponse\x12_\n\rresponse_type\x18\x04 \x01(\x0e\x32H.google.cloud.webrisk.v1beta1.ComputeThreatListDiffResponse.ResponseType\x12\x45\n\tadditions\x18\x05 \x01(\x0b\x32\x32.google.cloud.webrisk.v1beta1.ThreatEntryAdditions\x12\x43\n\x08removals\x18\x06 \x01(\x0b\x32\x31.google.cloud.webrisk.v1beta1.ThreatEntryRemovals\x12\x19\n\x11new_version_token\x18\x07 \x01(\x0c\x12V\n\x08\x63hecksum\x18\x08 \x01(\x0b\x32\x44.google.cloud.webrisk.v1beta1.ComputeThreatListDiffResponse.Checksum\x12\x39\n\x15recommended_next_diff\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x1a\x1a\n\x08\x43hecksum\x12\x0e\n\x06sha256\x18\x01 \x01(\x0c"B\n\x0cResponseType\x12\x1d\n\x19RESPONSE_TYPE_UNSPECIFIED\x10\x00\x12\x08\n\x04\x44IFF\x10\x01\x12\t\n\x05RESET\x10\x02"j\n\x11SearchUrisRequest\x12\x10\n\x03uri\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x43\n\x0cthreat_types\x18\x02 \x03(\x0e\x32(.google.cloud.webrisk.v1beta1.ThreatTypeB\x03\xe0\x41\x02"\xde\x01\n\x12SearchUrisResponse\x12J\n\x06threat\x18\x01 \x01(\x0b\x32:.google.cloud.webrisk.v1beta1.SearchUrisResponse.ThreatUri\x1a|\n\tThreatUri\x12>\n\x0cthreat_types\x18\x01 
\x03(\x0e\x32(.google.cloud.webrisk.v1beta1.ThreatType\x12/\n\x0b\x65xpire_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"o\n\x13SearchHashesRequest\x12\x13\n\x0bhash_prefix\x18\x01 \x01(\x0c\x12\x43\n\x0cthreat_types\x18\x02 \x03(\x0e\x32(.google.cloud.webrisk.v1beta1.ThreatTypeB\x03\xe0\x41\x02"\xae\x02\n\x14SearchHashesResponse\x12N\n\x07threats\x18\x01 \x03(\x0b\x32=.google.cloud.webrisk.v1beta1.SearchHashesResponse.ThreatHash\x12\x38\n\x14negative_expire_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x1a\x8b\x01\n\nThreatHash\x12>\n\x0cthreat_types\x18\x01 \x03(\x0e\x32(.google.cloud.webrisk.v1beta1.ThreatType\x12\x0c\n\x04hash\x18\x02 \x01(\x0c\x12/\n\x0b\x65xpire_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"\x99\x01\n\x14ThreatEntryAdditions\x12;\n\nraw_hashes\x18\x01 \x03(\x0b\x32\'.google.cloud.webrisk.v1beta1.RawHashes\x12\x44\n\x0brice_hashes\x18\x02 \x01(\x0b\x32/.google.cloud.webrisk.v1beta1.RiceDeltaEncoding"\x9b\x01\n\x13ThreatEntryRemovals\x12=\n\x0braw_indices\x18\x01 \x01(\x0b\x32(.google.cloud.webrisk.v1beta1.RawIndices\x12\x45\n\x0crice_indices\x18\x02 \x01(\x0b\x32/.google.cloud.webrisk.v1beta1.RiceDeltaEncoding"\x1d\n\nRawIndices\x12\x0f\n\x07indices\x18\x01 \x03(\x05"4\n\tRawHashes\x12\x13\n\x0bprefix_size\x18\x01 \x01(\x05\x12\x12\n\nraw_hashes\x18\x02 \x01(\x0c"k\n\x11RiceDeltaEncoding\x12\x13\n\x0b\x66irst_value\x18\x01 \x01(\x03\x12\x16\n\x0erice_parameter\x18\x02 \x01(\x05\x12\x13\n\x0b\x65ntry_count\x18\x03 \x01(\x05\x12\x14\n\x0c\x65ncoded_data\x18\x04 \x01(\x0c*e\n\nThreatType\x12\x1b\n\x17THREAT_TYPE_UNSPECIFIED\x10\x00\x12\x0b\n\x07MALWARE\x10\x01\x12\x16\n\x12SOCIAL_ENGINEERING\x10\x02\x12\x15\n\x11UNWANTED_SOFTWARE\x10\x03*F\n\x0f\x43ompressionType\x12 
\n\x1c\x43OMPRESSION_TYPE_UNSPECIFIED\x10\x00\x12\x07\n\x03RAW\x10\x01\x12\x08\n\x04RICE\x10\x02\x32\x9e\x05\n\x15WebRiskServiceV1Beta1\x12\xe2\x01\n\x15\x43omputeThreatListDiff\x12:.google.cloud.webrisk.v1beta1.ComputeThreatListDiffRequest\x1a;.google.cloud.webrisk.v1beta1.ComputeThreatListDiffResponse"P\x82\xd3\xe4\x93\x02"\x12 /v1beta1/threatLists:computeDiff\xda\x41%threat_type,version_token,constraints\x12\xa0\x01\n\nSearchUris\x12/.google.cloud.webrisk.v1beta1.SearchUrisRequest\x1a\x30.google.cloud.webrisk.v1beta1.SearchUrisResponse"/\x82\xd3\xe4\x93\x02\x16\x12\x14/v1beta1/uris:search\xda\x41\x10uri,threat_types\x12\xb0\x01\n\x0cSearchHashes\x12\x31.google.cloud.webrisk.v1beta1.SearchHashesRequest\x1a\x32.google.cloud.webrisk.v1beta1.SearchHashesResponse"9\x82\xd3\xe4\x93\x02\x18\x12\x16/v1beta1/hashes:search\xda\x41\x18hash_prefix,threat_types\x1aJ\xca\x41\x16webrisk.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB\xb6\x01\n\x1a\x63om.google.webrisk.v1beta1B\x0cWebRiskProtoP\x01ZCgoogle.golang.org/genproto/googleapis/cloud/webrisk/v1beta1;webrisk\xa2\x02\x04GCWR\xaa\x02\x1cGoogle.Cloud.WebRisk.V1Beta1\xca\x02\x1cGoogle\\Cloud\\WebRisk\\V1beta1b\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, ], ) @@ -69,8 +73,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=2293, - serialized_end=2394, + serialized_start=2376, + serialized_end=2477, ) _sym_db.RegisterEnumDescriptor(_THREATTYPE) @@ -97,8 +101,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=2396, - serialized_end=2466, + serialized_start=2479, + serialized_end=2549, ) _sym_db.RegisterEnumDescriptor(_COMPRESSIONTYPE) @@ -134,8 +138,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=981, - serialized_end=1047, + serialized_start=1049, + 
serialized_end=1115, ) _sym_db.RegisterEnumDescriptor(_COMPUTETHREATLISTDIFFRESPONSE_RESPONSETYPE) @@ -210,8 +214,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=358, - serialized_end=506, + serialized_start=426, + serialized_end=574, ) _COMPUTETHREATLISTDIFFREQUEST = _descriptor.Descriptor( @@ -236,7 +240,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -272,20 +276,20 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], extensions=[], - nested_types=[_COMPUTETHREATLISTDIFFREQUEST_CONSTRAINTS], + nested_types=[_COMPUTETHREATLISTDIFFREQUEST_CONSTRAINTS,], enum_types=[], serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=146, - serialized_end=506, + serialized_start=204, + serialized_end=574, ) @@ -313,7 +317,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -323,8 +327,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=953, - serialized_end=979, + serialized_start=1021, + serialized_end=1047, ) _COMPUTETHREATLISTDIFFRESPONSE = _descriptor.Descriptor( @@ -444,15 +448,15 @@ ), ], extensions=[], - nested_types=[_COMPUTETHREATLISTDIFFRESPONSE_CHECKSUM], - enum_types=[_COMPUTETHREATLISTDIFFRESPONSE_RESPONSETYPE], + nested_types=[_COMPUTETHREATLISTDIFFRESPONSE_CHECKSUM,], + enum_types=[_COMPUTETHREATLISTDIFFRESPONSE_RESPONSETYPE,], serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=509, - serialized_end=1047, + serialized_start=577, + serialized_end=1115, ) @@ -478,7 +482,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + 
serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -496,7 +500,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -508,8 +512,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1049, - serialized_end=1145, + serialized_start=1117, + serialized_end=1223, ) @@ -565,8 +569,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1246, - serialized_end=1370, + serialized_start=1324, + serialized_end=1448, ) _SEARCHURISRESPONSE = _descriptor.Descriptor( @@ -593,18 +597,18 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], - nested_types=[_SEARCHURISRESPONSE_THREATURI], + nested_types=[_SEARCHURISRESPONSE_THREATURI,], enum_types=[], serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1148, - serialized_end=1370, + serialized_start=1226, + serialized_end=1448, ) @@ -648,7 +652,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -660,8 +664,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1372, - serialized_end=1478, + serialized_start=1450, + serialized_end=1561, ) @@ -735,8 +739,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1644, - serialized_end=1783, + serialized_start=1727, + serialized_end=1866, ) _SEARCHHASHESRESPONSE = _descriptor.Descriptor( @@ -784,15 +788,15 @@ ), ], extensions=[], - nested_types=[_SEARCHHASHESRESPONSE_THREATHASH], + nested_types=[_SEARCHHASHESRESPONSE_THREATHASH,], enum_types=[], serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1481, - serialized_end=1783, + serialized_start=1564, + serialized_end=1866, ) @@ -848,8 +852,8 @@ syntax="proto3", 
extension_ranges=[], oneofs=[], - serialized_start=1786, - serialized_end=1939, + serialized_start=1869, + serialized_end=2022, ) @@ -905,8 +909,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1942, - serialized_end=2097, + serialized_start=2025, + serialized_end=2180, ) @@ -934,7 +938,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -944,8 +948,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2099, - serialized_end=2128, + serialized_start=2182, + serialized_end=2211, ) @@ -1001,8 +1005,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2130, - serialized_end=2182, + serialized_start=2213, + serialized_end=2265, ) @@ -1094,8 +1098,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2184, - serialized_end=2291, + serialized_start=2267, + serialized_end=2374, ) _COMPUTETHREATLISTDIFFREQUEST_CONSTRAINTS.fields_by_name[ @@ -1208,13 +1212,13 @@ Attributes: threat_type: - Required. The ThreatList to update. + The ThreatList to update. version_token: The current version token of the client for the requested list (the client version that was received from the last successful diff). constraints: - The constraints associated with this request. + Required. The constraints associated with this request. """, # @@protoc_insertion_point(class_scope:google.cloud.webrisk.v1beta1.ComputeThreatListDiffRequest) ), @@ -1248,8 +1252,8 @@ __doc__=""" Attributes: response_type: - The type of response. This may indicate that an action is - required by the client when the response is received. + The type of response. This may indicate that an action must be + taken by the client when the response is received. additions: A set of entries to add to a local threat type's list. removals: @@ -1287,7 +1291,7 @@ Attributes: uri: - The URI to be checked for matches. + Required. The URI to be checked for matches. threat_types: Required. 
The ThreatLists to search in. """, @@ -1533,15 +1537,22 @@ DESCRIPTOR._options = None +_COMPUTETHREATLISTDIFFREQUEST.fields_by_name["threat_type"]._options = None +_COMPUTETHREATLISTDIFFREQUEST.fields_by_name["constraints"]._options = None +_SEARCHURISREQUEST.fields_by_name["uri"]._options = None +_SEARCHURISREQUEST.fields_by_name["threat_types"]._options = None +_SEARCHHASHESREQUEST.fields_by_name["threat_types"]._options = None _WEBRISKSERVICEV1BETA1 = _descriptor.ServiceDescriptor( name="WebRiskServiceV1Beta1", full_name="google.cloud.webrisk.v1beta1.WebRiskServiceV1Beta1", file=DESCRIPTOR, index=0, - serialized_options=None, - serialized_start=2469, - serialized_end=2977, + serialized_options=_b( + "\312A\026webrisk.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" + ), + serialized_start=2552, + serialized_end=3222, methods=[ _descriptor.MethodDescriptor( name="ComputeThreatListDiff", @@ -1551,7 +1562,7 @@ input_type=_COMPUTETHREATLISTDIFFREQUEST, output_type=_COMPUTETHREATLISTDIFFRESPONSE, serialized_options=_b( - '\202\323\344\223\002"\022 /v1beta1/threatLists:computeDiff' + '\202\323\344\223\002"\022 /v1beta1/threatLists:computeDiff\332A%threat_type,version_token,constraints' ), ), _descriptor.MethodDescriptor( @@ -1562,7 +1573,7 @@ input_type=_SEARCHURISREQUEST, output_type=_SEARCHURISRESPONSE, serialized_options=_b( - "\202\323\344\223\002\026\022\024/v1beta1/uris:search" + "\202\323\344\223\002\026\022\024/v1beta1/uris:search\332A\020uri,threat_types" ), ), _descriptor.MethodDescriptor( @@ -1573,7 +1584,7 @@ input_type=_SEARCHHASHESREQUEST, output_type=_SEARCHHASHESRESPONSE, serialized_options=_b( - "\202\323\344\223\002\030\022\026/v1beta1/hashes:search" + "\202\323\344\223\002\030\022\026/v1beta1/hashes:search\332A\030hash_prefix,threat_types" ), ), ], diff --git a/webrisk/google/cloud/webrisk_v1beta1/types.py b/webrisk/google/cloud/webrisk_v1beta1/types.py index 81d2da5f6dd6..28ce703af790 100644 --- 
a/webrisk/google/cloud/webrisk_v1beta1/types.py +++ b/webrisk/google/cloud/webrisk_v1beta1/types.py @@ -24,9 +24,13 @@ from google.protobuf import timestamp_pb2 -_shared_modules = [timestamp_pb2] +_shared_modules = [ + timestamp_pb2, +] -_local_modules = [webrisk_pb2] +_local_modules = [ + webrisk_pb2, +] names = [] diff --git a/webrisk/synth.metadata b/webrisk/synth.metadata index c5119433795c..1477bbdba2f5 100644 --- a/webrisk/synth.metadata +++ b/webrisk/synth.metadata @@ -1,26 +1,26 @@ { - "updateTime": "2019-08-06T12:51:28.182890Z", + "updateTime": "2019-10-29T12:49:55.063315Z", "sources": [ { "generator": { "name": "artman", - "version": "0.32.1", - "dockerImage": "googleapis/artman@sha256:a684d40ba9a4e15946f5f2ca6b4bd9fe301192f522e9de4fff622118775f309b" + "version": "0.40.3", + "dockerImage": "googleapis/artman@sha256:c805f50525f5f557886c94ab76f56eaa09cb1da58c3ee95111fd34259376621a" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "e699b0cba64ffddfae39633417180f1f65875896", - "internalRef": "261759677" + "sha": "532773acbed8d09451dafb3d403ab1823e6a6e1e", + "internalRef": "277177415" } }, { "template": { "name": "python_library", "origin": "synthtool.gcp", - "version": "2019.5.2" + "version": "2019.10.17" } } ], diff --git a/webrisk/tests/unit/gapic/v1beta1/test_web_risk_service_v1_beta1_client_v1beta1.py b/webrisk/tests/unit/gapic/v1beta1/test_web_risk_service_v1_beta1_client_v1beta1.py index c44043698f86..e458f8821f0b 100644 --- a/webrisk/tests/unit/gapic/v1beta1/test_web_risk_service_v1_beta1_client_v1beta1.py +++ b/webrisk/tests/unit/gapic/v1beta1/test_web_risk_service_v1_beta1_client_v1beta1.py @@ -158,11 +158,14 @@ def test_search_hashes(self): create_channel.return_value = channel client = webrisk_v1beta1.WebRiskServiceV1Beta1Client() - response = client.search_hashes() + # Setup Request + threat_types = [] + + response = client.search_hashes(threat_types) assert expected_response == response 
assert len(channel.requests) == 1 - expected_request = webrisk_pb2.SearchHashesRequest() + expected_request = webrisk_pb2.SearchHashesRequest(threat_types=threat_types) actual_request = channel.requests[0][1] assert expected_request == actual_request @@ -174,5 +177,8 @@ def test_search_hashes_exception(self): create_channel.return_value = channel client = webrisk_v1beta1.WebRiskServiceV1Beta1Client() + # Setup request + threat_types = [] + with pytest.raises(CustomException): - client.search_hashes() + client.search_hashes(threat_types) diff --git a/websecurityscanner/docs/conf.py b/websecurityscanner/docs/conf.py index 17adef53a653..bd4ed2df4750 100644 --- a/websecurityscanner/docs/conf.py +++ b/websecurityscanner/docs/conf.py @@ -338,7 +338,7 @@ "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://2.python-requests.org/en/master/", None), + "requests": ("https://requests.kennethreitz.org/en/stable/", None), "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1alpha/gapic/enums.py b/websecurityscanner/google/cloud/websecurityscanner_v1alpha/gapic/enums.py index 9765b3b06ce8..aef4f2f5dd71 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1alpha/gapic/enums.py +++ b/websecurityscanner/google/cloud/websecurityscanner_v1alpha/gapic/enums.py @@ -54,6 +54,13 @@ class FindingType(enum.IntEnum): https://www.google.com/about/appsecurity/learning/xss/. CLEAR_TEXT_PASSWORD (int): An application appears to be transmitting a password field in clear text. An attacker can eavesdrop network traffic and sniff the password field. 
+ INVALID_CONTENT_TYPE (int): An application returns sensitive content with an invalid content type, + or without an 'X-Content-Type-Options: nosniff' header. + XSS_ANGULAR_CALLBACK (int): A cross-site scripting (XSS) vulnerability in AngularJS module that + occurs when a user-provided string is interpolated by Angular. + INVALID_HEADER (int): A malformed or invalid valued header. + MISSPELLED_SECURITY_HEADER_NAME (int): Misspelled security header name. + MISMATCHING_SECURITY_HEADER_VALUES (int): Mismatching values in a duplicate security header. """ FINDING_TYPE_UNSPECIFIED = 0 @@ -63,6 +70,11 @@ class FindingType(enum.IntEnum): XSS_CALLBACK = 3 XSS_ERROR = 4 CLEAR_TEXT_PASSWORD = 6 + INVALID_CONTENT_TYPE = 7 + XSS_ANGULAR_CALLBACK = 8 + INVALID_HEADER = 9 + MISSPELLED_SECURITY_HEADER_NAME = 10 + MISMATCHING_SECURITY_HEADER_VALUES = 11 class ScanConfig(object): diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1alpha/gapic/web_security_scanner_client.py b/websecurityscanner/google/cloud/websecurityscanner_v1alpha/gapic/web_security_scanner_client.py index 03e217725ffa..dfc93d7269a1 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1alpha/gapic/web_security_scanner_client.py +++ b/websecurityscanner/google/cloud/websecurityscanner_v1alpha/gapic/web_security_scanner_client.py @@ -260,11 +260,9 @@ def create_scan_config( >>> response = client.create_scan_config(parent, scan_config) Args: - parent (str): Required. - The parent resource name where the scan is created, which should be a + parent (str): Required. The parent resource name where the scan is created, which should be a project resource name in the format 'projects/{projectId}'. - scan_config (Union[dict, ~google.cloud.websecurityscanner_v1alpha.types.ScanConfig]): Required. - The ScanConfig to be created. + scan_config (Union[dict, ~google.cloud.websecurityscanner_v1alpha.types.ScanConfig]): Required. The ScanConfig to be created. 
If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.websecurityscanner_v1alpha.types.ScanConfig` @@ -338,8 +336,7 @@ def delete_scan_config( >>> client.delete_scan_config(name) Args: - name (str): Required. - The resource name of the ScanConfig to be deleted. The name follows the + name (str): Required. The resource name of the ScanConfig to be deleted. The name follows the format of 'projects/{projectId}/scanConfigs/{scanConfigId}'. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will @@ -406,8 +403,7 @@ def get_scan_config( >>> response = client.get_scan_config(name) Args: - name (str): Required. - The resource name of the ScanConfig to be returned. The name follows the + name (str): Required. The resource name of the ScanConfig to be returned. The name follows the format of 'projects/{projectId}/scanConfigs/{scanConfigId}'. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will @@ -490,8 +486,7 @@ def list_scan_configs( ... pass Args: - parent (str): Required. - The parent resource name, which should be a project resource name in the + parent (str): Required. The parent resource name, which should be a project resource name in the format 'projects/{projectId}'. page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per- @@ -587,8 +582,7 @@ def update_scan_config( >>> response = client.update_scan_config(scan_config, update_mask) Args: - scan_config (Union[dict, ~google.cloud.websecurityscanner_v1alpha.types.ScanConfig]): Required. - The ScanConfig to be updated. The name field must be set to identify the + scan_config (Union[dict, ~google.cloud.websecurityscanner_v1alpha.types.ScanConfig]): Required. The ScanConfig to be updated. The name field must be set to identify the resource to be updated. 
The values of fields not covered by the mask will be ignored. @@ -670,8 +664,7 @@ def start_scan_run( >>> response = client.start_scan_run(name) Args: - name (str): Required. - The resource name of the ScanConfig to be used. The name follows the + name (str): Required. The resource name of the ScanConfig to be used. The name follows the format of 'projects/{projectId}/scanConfigs/{scanConfigId}'. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will @@ -741,8 +734,7 @@ def get_scan_run( >>> response = client.get_scan_run(name) Args: - name (str): Required. - The resource name of the ScanRun to be returned. The name follows the + name (str): Required. The resource name of the ScanRun to be returned. The name follows the format of 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}'. retry (Optional[google.api_core.retry.Retry]): A retry object used @@ -827,8 +819,7 @@ def list_scan_runs( ... pass Args: - parent (str): Required. - The parent resource name, which should be a scan resource name in the + parent (str): Required. The parent resource name, which should be a scan resource name in the format 'projects/{projectId}/scanConfigs/{scanConfigId}'. page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per- @@ -919,8 +910,7 @@ def stop_scan_run( >>> response = client.stop_scan_run(name) Args: - name (str): Required. - The resource name of the ScanRun to be stopped. The name follows the + name (str): Required. The resource name of the ScanRun to be stopped. The name follows the format of 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}'. retry (Optional[google.api_core.retry.Retry]): A retry object used @@ -1004,8 +994,7 @@ def list_crawled_urls( ... pass Args: - parent (str): Required. - The parent resource name, which should be a scan run resource name in the + parent (str): Required. 
The parent resource name, which should be a scan run resource name in the format 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}'. page_size (int): The maximum number of resources contained in the @@ -1097,8 +1086,7 @@ def get_finding( >>> response = client.get_finding(name) Args: - name (str): Required. - The resource name of the Finding to be returned. The name follows the + name (str): Required. The resource name of the Finding to be returned. The name follows the format of 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}/findings/{findingId}'. retry (Optional[google.api_core.retry.Retry]): A retry object used @@ -1186,12 +1174,11 @@ def list_findings( ... pass Args: - parent (str): Required. - The parent resource name, which should be a scan run resource name in the + parent (str): Required. The parent resource name, which should be a scan run resource name in the format 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}'. - filter_ (str): The filter expression. The expression must be in the format: . Supported - field: 'finding\_type'. Supported operator: '='. + filter_ (str): Required. The filter expression. The expression must be in the format: . + Supported field: 'finding\_type'. Supported operator: '='. page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per- resource, this parameter does not affect the return value. If page @@ -1281,8 +1268,7 @@ def list_finding_type_stats( >>> response = client.list_finding_type_stats(parent) Args: - parent (str): Required. - The parent resource name, which should be a scan run resource name in the + parent (str): Required. The parent resource name, which should be a scan run resource name in the format 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}'. 
retry (Optional[google.api_core.retry.Retry]): A retry object used diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/crawled_url.proto b/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/crawled_url.proto index d4c40eae86dd..c22f4272f167 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/crawled_url.proto +++ b/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/crawled_url.proto @@ -1,4 +1,4 @@ -// Copyright 2018 Google Inc. +// Copyright 2019 Google LLC. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -11,13 +11,12 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. +// syntax = "proto3"; package google.cloud.websecurityscanner.v1alpha; -import "google/api/annotations.proto"; - option go_package = "google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha;websecurityscanner"; option java_multiple_files = true; option java_outer_classname = "CrawledUrlProto"; @@ -27,16 +26,13 @@ option java_package = "com.google.cloud.websecurityscanner.v1alpha"; // Security Scanner Service crawls the web applications, following all links // within the scope of sites, to find the URLs to test against. message CrawledUrl { - // Output only. - // The http method of the request that was used to visit the URL, in + // Output only. The http method of the request that was used to visit the URL, in // uppercase. string http_method = 1; - // Output only. - // The URL that was crawled. + // Output only. The URL that was crawled. string url = 2; - // Output only. - // The body of the request that was used to visit the URL. + // Output only. The body of the request that was used to visit the URL. 
string body = 3; } diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/crawled_url_pb2.py b/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/crawled_url_pb2.py index 8c6f9e9551b9..046134e81eb5 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/crawled_url_pb2.py +++ b/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/crawled_url_pb2.py @@ -15,9 +15,6 @@ _sym_db = _symbol_database.Default() -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 - - DESCRIPTOR = _descriptor.FileDescriptor( name="google/cloud/websecurityscanner_v1alpha/proto/crawled_url.proto", package="google.cloud.websecurityscanner.v1alpha", @@ -26,9 +23,8 @@ "\n+com.google.cloud.websecurityscanner.v1alphaB\017CrawledUrlProtoP\001ZYgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha;websecurityscanner" ), serialized_pb=_b( - "\n?google/cloud/websecurityscanner_v1alpha/proto/crawled_url.proto\x12'google.cloud.websecurityscanner.v1alpha\x1a\x1cgoogle/api/annotations.proto\"<\n\nCrawledUrl\x12\x13\n\x0bhttp_method\x18\x01 \x01(\t\x12\x0b\n\x03url\x18\x02 \x01(\t\x12\x0c\n\x04\x62ody\x18\x03 \x01(\tB\x9b\x01\n+com.google.cloud.websecurityscanner.v1alphaB\x0f\x43rawledUrlProtoP\x01ZYgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha;websecurityscannerb\x06proto3" + "\n?google/cloud/websecurityscanner_v1alpha/proto/crawled_url.proto\x12'google.cloud.websecurityscanner.v1alpha\"<\n\nCrawledUrl\x12\x13\n\x0bhttp_method\x18\x01 \x01(\t\x12\x0b\n\x03url\x18\x02 \x01(\t\x12\x0c\n\x04\x62ody\x18\x03 \x01(\tB\x9b\x01\n+com.google.cloud.websecurityscanner.v1alphaB\x0f\x43rawledUrlProtoP\x01ZYgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha;websecurityscannerb\x06proto3" ), - dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR], ) @@ -102,8 +98,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=138, - 
serialized_end=198, + serialized_start=108, + serialized_end=168, ) DESCRIPTOR.message_types_by_name["CrawledUrl"] = _CRAWLEDURL diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/finding.proto b/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/finding.proto index 07140e276510..c02020f34319 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/finding.proto +++ b/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/finding.proto @@ -1,4 +1,4 @@ -// Copyright 2018 Google Inc. +// Copyright 2019 Google LLC. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -11,12 +11,13 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. +// syntax = "proto3"; package google.cloud.websecurityscanner.v1alpha; -import "google/api/annotations.proto"; +import "google/api/resource.proto"; import "google/cloud/websecurityscanner/v1alpha/finding_addon.proto"; option go_package = "google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha;websecurityscanner"; @@ -27,6 +28,11 @@ option java_package = "com.google.cloud.websecurityscanner.v1alpha"; // A Finding resource represents a vulnerability instance identified during a // ScanRun. message Finding { + option (google.api.resource) = { + type: "websecurityscanner.googleapis.com/Finding" + pattern: "projects/{project}/scanConfigs/{scan_config}/scanRuns/{scan_run}/findings/{finding}" + }; + // Types of Findings. enum FindingType { // The invalid finding type. @@ -71,70 +77,76 @@ message Finding { // An application appears to be transmitting a password field in clear text. // An attacker can eavesdrop network traffic and sniff the password field. 
CLEAR_TEXT_PASSWORD = 6; + + // An application returns sensitive content with an invalid content type, + // or without an 'X-Content-Type-Options: nosniff' header. + INVALID_CONTENT_TYPE = 7; + + // A cross-site scripting (XSS) vulnerability in AngularJS module that + // occurs when a user-provided string is interpolated by Angular. + XSS_ANGULAR_CALLBACK = 8; + + // A malformed or invalid valued header. + INVALID_HEADER = 9; + + // Misspelled security header name. + MISSPELLED_SECURITY_HEADER_NAME = 10; + + // Mismatching values in a duplicate security header. + MISMATCHING_SECURITY_HEADER_VALUES = 11; } - // Output only. // The resource name of the Finding. The name follows the format of // 'projects/{projectId}/scanConfigs/{scanConfigId}/scanruns/{scanRunId}/findings/{findingId}'. // The finding IDs are generated by the system. string name = 1; - // Output only. // The type of the Finding. FindingType finding_type = 2; - // Output only. // The http method of the request that triggered the vulnerability, in // uppercase. string http_method = 3; - // Output only. // The URL produced by the server-side fuzzer and used in the request that // triggered the vulnerability. string fuzzed_url = 4; - // Output only. // The body of the request that triggered the vulnerability. string body = 5; - // Output only. // The description of the vulnerability. string description = 6; - // Output only. // The URL containing human-readable payload that user can leverage to // reproduce the vulnerability. string reproduction_url = 7; - // Output only. // If the vulnerability was originated from nested IFrame, the immediate // parent IFrame is reported. string frame_url = 8; - // Output only. // The URL where the browser lands when the vulnerability is detected. string final_url = 9; - // Output only. // The tracking ID uniquely identifies a vulnerability instance across // multiple ScanRuns. string tracking_id = 10; - // Output only. 
// An addon containing information about outdated libraries. OutdatedLibrary outdated_library = 11; - // Output only. // An addon containing detailed information regarding any resource causing the // vulnerability such as JavaScript sources, image, audio files, etc. ViolatingResource violating_resource = 12; - // Output only. + // An addon containing information about vulnerable or missing HTTP headers. + VulnerableHeaders vulnerable_headers = 15; + // An addon containing information about request parameters which were found // to be vulnerable. VulnerableParameters vulnerable_parameters = 13; - // Output only. // An addon containing information reported for an XSS, if any. Xss xss = 14; } diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/finding_addon.proto b/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/finding_addon.proto index a7d344b84bc6..3fafdb40206a 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/finding_addon.proto +++ b/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/finding_addon.proto @@ -1,4 +1,4 @@ -// Copyright 2018 Google Inc. +// Copyright 2019 Google LLC. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -11,13 +11,12 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. +// syntax = "proto3"; package google.cloud.websecurityscanner.v1alpha; -import "google/api/annotations.proto"; - option go_package = "google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha;websecurityscanner"; option java_multiple_files = true; option java_outer_classname = "FindingAddonProto"; @@ -51,6 +50,24 @@ message VulnerableParameters { repeated string parameter_names = 1; } +// Information about vulnerable or missing HTTP Headers. 
+message VulnerableHeaders { + // Describes a HTTP Header. + message Header { + // Header name. + string name = 1; + + // Header value. + string value = 2; + } + + // List of vulnerable headers. + repeated Header headers = 1; + + // List of missing headers. + repeated Header missing_headers = 2; +} + // Information reported for an XSS. message Xss { // Stack traces leading to the point where the XSS occurred. diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/finding_addon_pb2.py b/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/finding_addon_pb2.py index fa01f56ca8a0..d0c9962adf2a 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/finding_addon_pb2.py +++ b/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/finding_addon_pb2.py @@ -15,9 +15,6 @@ _sym_db = _symbol_database.Default() -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 - - DESCRIPTOR = _descriptor.FileDescriptor( name="google/cloud/websecurityscanner_v1alpha/proto/finding_addon.proto", package="google.cloud.websecurityscanner.v1alpha", @@ -26,9 +23,8 @@ "\n+com.google.cloud.websecurityscanner.v1alphaB\021FindingAddonProtoP\001ZYgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha;websecurityscanner" ), serialized_pb=_b( - '\nAgoogle/cloud/websecurityscanner_v1alpha/proto/finding_addon.proto\x12\'google.cloud.websecurityscanner.v1alpha\x1a\x1cgoogle/api/annotations.proto"Q\n\x0fOutdatedLibrary\x12\x14\n\x0clibrary_name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\x12\x17\n\x0flearn_more_urls\x18\x03 \x03(\t"?\n\x11ViolatingResource\x12\x14\n\x0c\x63ontent_type\x18\x01 \x01(\t\x12\x14\n\x0cresource_url\x18\x02 \x01(\t"/\n\x14VulnerableParameters\x12\x17\n\x0fparameter_names\x18\x01 \x03(\t"2\n\x03Xss\x12\x14\n\x0cstack_traces\x18\x01 \x03(\t\x12\x15\n\rerror_message\x18\x02 
\x01(\tB\x9d\x01\n+com.google.cloud.websecurityscanner.v1alphaB\x11\x46indingAddonProtoP\x01ZYgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha;websecurityscannerb\x06proto3' + '\nAgoogle/cloud/websecurityscanner_v1alpha/proto/finding_addon.proto\x12\'google.cloud.websecurityscanner.v1alpha"Q\n\x0fOutdatedLibrary\x12\x14\n\x0clibrary_name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\x12\x17\n\x0flearn_more_urls\x18\x03 \x03(\t"?\n\x11ViolatingResource\x12\x14\n\x0c\x63ontent_type\x18\x01 \x01(\t\x12\x14\n\x0cresource_url\x18\x02 \x01(\t"/\n\x14VulnerableParameters\x12\x17\n\x0fparameter_names\x18\x01 \x03(\t"\xea\x01\n\x11VulnerableHeaders\x12R\n\x07headers\x18\x01 \x03(\x0b\x32\x41.google.cloud.websecurityscanner.v1alpha.VulnerableHeaders.Header\x12Z\n\x0fmissing_headers\x18\x02 \x03(\x0b\x32\x41.google.cloud.websecurityscanner.v1alpha.VulnerableHeaders.Header\x1a%\n\x06Header\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t"2\n\x03Xss\x12\x14\n\x0cstack_traces\x18\x01 \x03(\t\x12\x15\n\rerror_message\x18\x02 \x01(\tB\x9d\x01\n+com.google.cloud.websecurityscanner.v1alphaB\x11\x46indingAddonProtoP\x01ZYgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha;websecurityscannerb\x06proto3' ), - dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR], ) @@ -102,8 +98,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=140, - serialized_end=221, + serialized_start=110, + serialized_end=191, ) @@ -159,8 +155,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=223, - serialized_end=286, + serialized_start=193, + serialized_end=256, ) @@ -198,8 +194,121 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=288, - serialized_end=335, + serialized_start=258, + serialized_end=305, +) + + +_VULNERABLEHEADERS_HEADER = _descriptor.Descriptor( + name="Header", + full_name="google.cloud.websecurityscanner.v1alpha.VulnerableHeaders.Header", + 
filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.cloud.websecurityscanner.v1alpha.VulnerableHeaders.Header.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.cloud.websecurityscanner.v1alpha.VulnerableHeaders.Header.value", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=505, + serialized_end=542, +) + +_VULNERABLEHEADERS = _descriptor.Descriptor( + name="VulnerableHeaders", + full_name="google.cloud.websecurityscanner.v1alpha.VulnerableHeaders", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="headers", + full_name="google.cloud.websecurityscanner.v1alpha.VulnerableHeaders.headers", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="missing_headers", + full_name="google.cloud.websecurityscanner.v1alpha.VulnerableHeaders.missing_headers", + index=1, + number=2, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + 
message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[_VULNERABLEHEADERS_HEADER], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=308, + serialized_end=542, ) @@ -255,13 +364,19 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=337, - serialized_end=387, + serialized_start=544, + serialized_end=594, ) +_VULNERABLEHEADERS_HEADER.containing_type = _VULNERABLEHEADERS +_VULNERABLEHEADERS.fields_by_name["headers"].message_type = _VULNERABLEHEADERS_HEADER +_VULNERABLEHEADERS.fields_by_name[ + "missing_headers" +].message_type = _VULNERABLEHEADERS_HEADER DESCRIPTOR.message_types_by_name["OutdatedLibrary"] = _OUTDATEDLIBRARY DESCRIPTOR.message_types_by_name["ViolatingResource"] = _VIOLATINGRESOURCE DESCRIPTOR.message_types_by_name["VulnerableParameters"] = _VULNERABLEPARAMETERS +DESCRIPTOR.message_types_by_name["VulnerableHeaders"] = _VULNERABLEHEADERS DESCRIPTOR.message_types_by_name["Xss"] = _XSS _sym_db.RegisterFileDescriptor(DESCRIPTOR) @@ -327,6 +442,45 @@ ) _sym_db.RegisterMessage(VulnerableParameters) +VulnerableHeaders = _reflection.GeneratedProtocolMessageType( + "VulnerableHeaders", + (_message.Message,), + dict( + Header=_reflection.GeneratedProtocolMessageType( + "Header", + (_message.Message,), + dict( + DESCRIPTOR=_VULNERABLEHEADERS_HEADER, + __module__="google.cloud.websecurityscanner_v1alpha.proto.finding_addon_pb2", + __doc__="""Describes a HTTP Header. + + + Attributes: + name: + Header name. + value: + Header value. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.websecurityscanner.v1alpha.VulnerableHeaders.Header) + ), + ), + DESCRIPTOR=_VULNERABLEHEADERS, + __module__="google.cloud.websecurityscanner_v1alpha.proto.finding_addon_pb2", + __doc__="""Information about vulnerable or missing HTTP Headers. + + + Attributes: + headers: + List of vulnerable headers. + missing_headers: + List of missing headers. + """, + # @@protoc_insertion_point(class_scope:google.cloud.websecurityscanner.v1alpha.VulnerableHeaders) + ), +) +_sym_db.RegisterMessage(VulnerableHeaders) +_sym_db.RegisterMessage(VulnerableHeaders.Header) + Xss = _reflection.GeneratedProtocolMessageType( "Xss", (_message.Message,), diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/finding_pb2.py b/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/finding_pb2.py index da3678c40687..4eb9f1c4f84e 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/finding_pb2.py +++ b/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/finding_pb2.py @@ -15,7 +15,7 @@ _sym_db = _symbol_database.Default() -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.cloud.websecurityscanner_v1alpha.proto import ( finding_addon_pb2 as google_dot_cloud_dot_websecurityscanner__v1alpha_dot_proto_dot_finding__addon__pb2, ) @@ -29,10 +29,10 @@ "\n+com.google.cloud.websecurityscanner.v1alphaB\014FindingProtoP\001ZYgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha;websecurityscanner" ), serialized_pb=_b( - '\n;google/cloud/websecurityscanner_v1alpha/proto/finding.proto\x12\'google.cloud.websecurityscanner.v1alpha\x1a\x1cgoogle/api/annotations.proto\x1a\x41google/cloud/websecurityscanner_v1alpha/proto/finding_addon.proto"\xf5\x05\n\x07\x46inding\x12\x0c\n\x04name\x18\x01 \x01(\t\x12R\n\x0c\x66inding_type\x18\x02 
\x01(\x0e\x32<.google.cloud.websecurityscanner.v1alpha.Finding.FindingType\x12\x13\n\x0bhttp_method\x18\x03 \x01(\t\x12\x12\n\nfuzzed_url\x18\x04 \x01(\t\x12\x0c\n\x04\x62ody\x18\x05 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x06 \x01(\t\x12\x18\n\x10reproduction_url\x18\x07 \x01(\t\x12\x11\n\tframe_url\x18\x08 \x01(\t\x12\x11\n\tfinal_url\x18\t \x01(\t\x12\x13\n\x0btracking_id\x18\n \x01(\t\x12R\n\x10outdated_library\x18\x0b \x01(\x0b\x32\x38.google.cloud.websecurityscanner.v1alpha.OutdatedLibrary\x12V\n\x12violating_resource\x18\x0c \x01(\x0b\x32:.google.cloud.websecurityscanner.v1alpha.ViolatingResource\x12\\\n\x15vulnerable_parameters\x18\r \x01(\x0b\x32=.google.cloud.websecurityscanner.v1alpha.VulnerableParameters\x12\x39\n\x03xss\x18\x0e \x01(\x0b\x32,.google.cloud.websecurityscanner.v1alpha.Xss"\xa1\x01\n\x0b\x46indingType\x12\x1c\n\x18\x46INDING_TYPE_UNSPECIFIED\x10\x00\x12\x11\n\rMIXED_CONTENT\x10\x01\x12\x14\n\x10OUTDATED_LIBRARY\x10\x02\x12\x11\n\rROSETTA_FLASH\x10\x05\x12\x10\n\x0cXSS_CALLBACK\x10\x03\x12\r\n\tXSS_ERROR\x10\x04\x12\x17\n\x13\x43LEAR_TEXT_PASSWORD\x10\x06\x42\x98\x01\n+com.google.cloud.websecurityscanner.v1alphaB\x0c\x46indingProtoP\x01ZYgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha;websecurityscannerb\x06proto3' + '\n;google/cloud/websecurityscanner_v1alpha/proto/finding.proto\x12\'google.cloud.websecurityscanner.v1alpha\x1a\x19google/api/resource.proto\x1a\x41google/cloud/websecurityscanner_v1alpha/proto/finding_addon.proto"\xe9\x08\n\x07\x46inding\x12\x0c\n\x04name\x18\x01 \x01(\t\x12R\n\x0c\x66inding_type\x18\x02 \x01(\x0e\x32<.google.cloud.websecurityscanner.v1alpha.Finding.FindingType\x12\x13\n\x0bhttp_method\x18\x03 \x01(\t\x12\x12\n\nfuzzed_url\x18\x04 \x01(\t\x12\x0c\n\x04\x62ody\x18\x05 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x06 \x01(\t\x12\x18\n\x10reproduction_url\x18\x07 \x01(\t\x12\x11\n\tframe_url\x18\x08 \x01(\t\x12\x11\n\tfinal_url\x18\t \x01(\t\x12\x13\n\x0btracking_id\x18\n 
\x01(\t\x12R\n\x10outdated_library\x18\x0b \x01(\x0b\x32\x38.google.cloud.websecurityscanner.v1alpha.OutdatedLibrary\x12V\n\x12violating_resource\x18\x0c \x01(\x0b\x32:.google.cloud.websecurityscanner.v1alpha.ViolatingResource\x12V\n\x12vulnerable_headers\x18\x0f \x01(\x0b\x32:.google.cloud.websecurityscanner.v1alpha.VulnerableHeaders\x12\\\n\x15vulnerable_parameters\x18\r \x01(\x0b\x32=.google.cloud.websecurityscanner.v1alpha.VulnerableParameters\x12\x39\n\x03xss\x18\x0e \x01(\x0b\x32,.google.cloud.websecurityscanner.v1alpha.Xss"\xb6\x02\n\x0b\x46indingType\x12\x1c\n\x18\x46INDING_TYPE_UNSPECIFIED\x10\x00\x12\x11\n\rMIXED_CONTENT\x10\x01\x12\x14\n\x10OUTDATED_LIBRARY\x10\x02\x12\x11\n\rROSETTA_FLASH\x10\x05\x12\x10\n\x0cXSS_CALLBACK\x10\x03\x12\r\n\tXSS_ERROR\x10\x04\x12\x17\n\x13\x43LEAR_TEXT_PASSWORD\x10\x06\x12\x18\n\x14INVALID_CONTENT_TYPE\x10\x07\x12\x18\n\x14XSS_ANGULAR_CALLBACK\x10\x08\x12\x12\n\x0eINVALID_HEADER\x10\t\x12#\n\x1fMISSPELLED_SECURITY_HEADER_NAME\x10\n\x12&\n"MISMATCHING_SECURITY_HEADER_VALUES\x10\x0b:\x84\x01\xea\x41\x80\x01\n)websecurityscanner.googleapis.com/Finding\x12Sprojects/{project}/scanConfigs/{scan_config}/scanRuns/{scan_run}/findings/{finding}B\x98\x01\n+com.google.cloud.websecurityscanner.v1alphaB\x0c\x46indingProtoP\x01ZYgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha;websecurityscannerb\x06proto3' ), dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_resource__pb2.DESCRIPTOR, google_dot_cloud_dot_websecurityscanner__v1alpha_dot_proto_dot_finding__addon__pb2.DESCRIPTOR, ], ) @@ -77,11 +77,42 @@ serialized_options=None, type=None, ), + _descriptor.EnumValueDescriptor( + name="INVALID_CONTENT_TYPE", + index=7, + number=7, + serialized_options=None, + type=None, + ), + _descriptor.EnumValueDescriptor( + name="XSS_ANGULAR_CALLBACK", + index=8, + number=8, + serialized_options=None, + type=None, + ), + _descriptor.EnumValueDescriptor( + name="INVALID_HEADER", index=9, number=9, 
serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="MISSPELLED_SECURITY_HEADER_NAME", + index=10, + number=10, + serialized_options=None, + type=None, + ), + _descriptor.EnumValueDescriptor( + name="MISMATCHING_SECURITY_HEADER_VALUES", + index=11, + number=11, + serialized_options=None, + type=None, + ), ], containing_type=None, serialized_options=None, - serialized_start=798, - serialized_end=959, + serialized_start=883, + serialized_end=1193, ) _sym_db.RegisterEnumDescriptor(_FINDING_FINDINGTYPE) @@ -309,10 +340,28 @@ serialized_options=None, file=DESCRIPTOR, ), + _descriptor.FieldDescriptor( + name="vulnerable_headers", + full_name="google.cloud.websecurityscanner.v1alpha.Finding.vulnerable_headers", + index=12, + number=15, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), _descriptor.FieldDescriptor( name="vulnerable_parameters", full_name="google.cloud.websecurityscanner.v1alpha.Finding.vulnerable_parameters", - index=12, + index=13, number=13, type=11, cpp_type=10, @@ -330,7 +379,7 @@ _descriptor.FieldDescriptor( name="xss", full_name="google.cloud.websecurityscanner.v1alpha.Finding.xss", - index=13, + index=14, number=14, type=11, cpp_type=10, @@ -349,13 +398,15 @@ extensions=[], nested_types=[], enum_types=[_FINDING_FINDINGTYPE], - serialized_options=None, + serialized_options=_b( + "\352A\200\001\n)websecurityscanner.googleapis.com/Finding\022Sprojects/{project}/scanConfigs/{scan_config}/scanRuns/{scan_run}/findings/{finding}" + ), is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=202, - serialized_end=959, + serialized_start=199, + serialized_end=1328, ) _FINDING.fields_by_name["finding_type"].enum_type = _FINDING_FINDINGTYPE @@ -369,6 +420,11 @@ ].message_type = ( 
google_dot_cloud_dot_websecurityscanner__v1alpha_dot_proto_dot_finding__addon__pb2._VIOLATINGRESOURCE ) +_FINDING.fields_by_name[ + "vulnerable_headers" +].message_type = ( + google_dot_cloud_dot_websecurityscanner__v1alpha_dot_proto_dot_finding__addon__pb2._VULNERABLEHEADERS +) _FINDING.fields_by_name[ "vulnerable_parameters" ].message_type = ( @@ -395,48 +451,48 @@ Attributes: name: - Output only. The resource name of the Finding. The name - follows the format of 'projects/{projectId}/scanConfigs/{scanC - onfigId}/scanruns/{scanRunId}/findings/{findingId}'. The - finding IDs are generated by the system. + The resource name of the Finding. The name follows the format + of 'projects/{projectId}/scanConfigs/{scanConfigId}/scanruns/{ + scanRunId}/findings/{findingId}'. The finding IDs are + generated by the system. finding_type: - Output only. The type of the Finding. + The type of the Finding. http_method: - Output only. The http method of the request that triggered the + The http method of the request that triggered the vulnerability, in uppercase. fuzzed_url: - Output only. The URL produced by the server-side fuzzer and - used in the request that triggered the vulnerability. + The URL produced by the server-side fuzzer and used in the + request that triggered the vulnerability. body: - Output only. The body of the request that triggered the - vulnerability. + The body of the request that triggered the vulnerability. description: - Output only. The description of the vulnerability. + The description of the vulnerability. reproduction_url: - Output only. The URL containing human-readable payload that - user can leverage to reproduce the vulnerability. + The URL containing human-readable payload that user can + leverage to reproduce the vulnerability. frame_url: - Output only. If the vulnerability was originated from nested - IFrame, the immediate parent IFrame is reported. 
+ If the vulnerability was originated from nested IFrame, the + immediate parent IFrame is reported. final_url: - Output only. The URL where the browser lands when the - vulnerability is detected. + The URL where the browser lands when the vulnerability is + detected. tracking_id: - Output only. The tracking ID uniquely identifies a - vulnerability instance across multiple ScanRuns. + The tracking ID uniquely identifies a vulnerability instance + across multiple ScanRuns. outdated_library: - Output only. An addon containing information about outdated - libraries. + An addon containing information about outdated libraries. violating_resource: - Output only. An addon containing detailed information - regarding any resource causing the vulnerability such as - JavaScript sources, image, audio files, etc. + An addon containing detailed information regarding any + resource causing the vulnerability such as JavaScript sources, + image, audio files, etc. + vulnerable_headers: + An addon containing information about vulnerable or missing + HTTP headers. vulnerable_parameters: - Output only. An addon containing information about request - parameters which were found to be vulnerable. + An addon containing information about request parameters which + were found to be vulnerable. xss: - Output only. An addon containing information reported for an - XSS, if any. + An addon containing information reported for an XSS, if any. 
""", # @@protoc_insertion_point(class_scope:google.cloud.websecurityscanner.v1alpha.Finding) ), @@ -445,4 +501,5 @@ DESCRIPTOR._options = None +_FINDING._options = None # @@protoc_insertion_point(module_scope) diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/finding_type_stats.proto b/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/finding_type_stats.proto index 73115667f418..e46b330c627e 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/finding_type_stats.proto +++ b/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/finding_type_stats.proto @@ -1,4 +1,4 @@ -// Copyright 2018 Google Inc. +// Copyright 2019 Google LLC. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -11,12 +11,12 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. +// syntax = "proto3"; package google.cloud.websecurityscanner.v1alpha; -import "google/api/annotations.proto"; import "google/cloud/websecurityscanner/v1alpha/finding.proto"; option go_package = "google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha;websecurityscanner"; @@ -27,11 +27,9 @@ option java_package = "com.google.cloud.websecurityscanner.v1alpha"; // A FindingTypeStats resource represents stats regarding a specific FindingType // of Findings under a given ScanRun. message FindingTypeStats { - // Output only. // The finding type associated with the stats. Finding.FindingType finding_type = 1; - // Output only. // The count of findings belonging to this finding type. 
int32 finding_count = 2; } diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/finding_type_stats_pb2.py b/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/finding_type_stats_pb2.py index a3e68d8e4184..94558c069bb9 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/finding_type_stats_pb2.py +++ b/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/finding_type_stats_pb2.py @@ -15,7 +15,6 @@ _sym_db = _symbol_database.Default() -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 from google.cloud.websecurityscanner_v1alpha.proto import ( finding_pb2 as google_dot_cloud_dot_websecurityscanner__v1alpha_dot_proto_dot_finding__pb2, ) @@ -29,11 +28,10 @@ "\n+com.google.cloud.websecurityscanner.v1alphaB\025FindingTypeStatsProtoP\001ZYgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha;websecurityscanner" ), serialized_pb=_b( - "\nFgoogle/cloud/websecurityscanner_v1alpha/proto/finding_type_stats.proto\x12'google.cloud.websecurityscanner.v1alpha\x1a\x1cgoogle/api/annotations.proto\x1a;google/cloud/websecurityscanner_v1alpha/proto/finding.proto\"}\n\x10\x46indingTypeStats\x12R\n\x0c\x66inding_type\x18\x01 \x01(\x0e\x32<.google.cloud.websecurityscanner.v1alpha.Finding.FindingType\x12\x15\n\rfinding_count\x18\x02 \x01(\x05\x42\xa1\x01\n+com.google.cloud.websecurityscanner.v1alphaB\x15\x46indingTypeStatsProtoP\x01ZYgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha;websecurityscannerb\x06proto3" + "\nFgoogle/cloud/websecurityscanner_v1alpha/proto/finding_type_stats.proto\x12'google.cloud.websecurityscanner.v1alpha\x1a;google/cloud/websecurityscanner_v1alpha/proto/finding.proto\"}\n\x10\x46indingTypeStats\x12R\n\x0c\x66inding_type\x18\x01 \x01(\x0e\x32<.google.cloud.websecurityscanner.v1alpha.Finding.FindingType\x12\x15\n\rfinding_count\x18\x02 
\x01(\x05\x42\xa1\x01\n+com.google.cloud.websecurityscanner.v1alphaB\x15\x46indingTypeStatsProtoP\x01ZYgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha;websecurityscannerb\x06proto3" ), dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_cloud_dot_websecurityscanner__v1alpha_dot_proto_dot_finding__pb2.DESCRIPTOR, + google_dot_cloud_dot_websecurityscanner__v1alpha_dot_proto_dot_finding__pb2.DESCRIPTOR ], ) @@ -90,8 +88,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=206, - serialized_end=331, + serialized_start=176, + serialized_end=301, ) _FINDINGTYPESTATS.fields_by_name[ @@ -114,10 +112,9 @@ Attributes: finding_type: - Output only. The finding type associated with the stats. + The finding type associated with the stats. finding_count: - Output only. The count of findings belonging to this finding - type. + The count of findings belonging to this finding type. """, # @@protoc_insertion_point(class_scope:google.cloud.websecurityscanner.v1alpha.FindingTypeStats) ), diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/scan_config.proto b/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/scan_config.proto index 144f7f2b5e4c..5497b1a0f6ac 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/scan_config.proto +++ b/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/scan_config.proto @@ -1,4 +1,4 @@ -// Copyright 2018 Google Inc. +// Copyright 2019 Google LLC. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -11,12 +11,15 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
+// syntax = "proto3"; package google.cloud.websecurityscanner.v1alpha; -import "google/api/annotations.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; +import "google/cloud/websecurityscanner/v1alpha/scan_run.proto"; import "google/protobuf/timestamp.proto"; option go_package = "google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha;websecurityscanner"; @@ -25,37 +28,42 @@ option java_outer_classname = "ScanConfigProto"; option java_package = "com.google.cloud.websecurityscanner.v1alpha"; // A ScanConfig resource contains the configurations to launch a scan. +// next id: 12 message ScanConfig { + option (google.api.resource) = { + type: "websecurityscanner.googleapis.com/ScanConfig" + pattern: "projects/{project}/scanConfigs/{scan_config}" + }; + // Scan authentication configuration. message Authentication { // Describes authentication configuration that uses a Google account. message GoogleAccount { - // Required. - // The user name of the Google account. - string username = 1; - - // Input only. - // Required. - // The password of the Google account. The credential is stored encrypted - // and not returned in any response. - string password = 2; + // Required. The user name of the Google account. + string username = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. Input only. The password of the Google account. The credential is stored encrypted + // and not returned in any response nor included in audit logs. + string password = 2 [ + (google.api.field_behavior) = REQUIRED, + (google.api.field_behavior) = INPUT_ONLY + ]; } // Describes authentication configuration that uses a custom account. message CustomAccount { - // Required. - // The user name of the custom account. - string username = 1; - - // Input only. - // Required. - // The password of the custom account. The credential is stored encrypted - // and not returned in any response. - string password = 2; - - // Required. 
- // The login form URL of the website. - string login_url = 3; + // Required. The user name of the custom account. + string username = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. Input only. The password of the custom account. The credential is stored encrypted + // and not returned in any response nor included in audit logs. + string password = 2 [ + (google.api.field_behavior) = REQUIRED, + (google.api.field_behavior) = INPUT_ONLY + ]; + + // Required. The login form URL of the website. + string login_url = 3 [(google.api.field_behavior) = REQUIRED]; } // Required. @@ -77,9 +85,8 @@ message ScanConfig { // immediately. google.protobuf.Timestamp schedule_time = 1; - // Required. - // The duration of time between executions in days. - int32 interval_duration_days = 2; + // Required. The duration of time between executions in days. + int32 interval_duration_days = 2 [(google.api.field_behavior) = REQUIRED]; } // Type of user agents used for scanning. @@ -115,9 +122,8 @@ message ScanConfig { // generated by the system. string name = 1; - // Required. - // The user provided display name of the ScanConfig. - string display_name = 2; + // Required. The user provided display name of the ScanConfig. + string display_name = 2 [(google.api.field_behavior) = REQUIRED]; // The maximum QPS during scanning. A valid value ranges from 5 to 20 // inclusively. If the field is unspecified or its value is set 0, server will @@ -125,9 +131,8 @@ message ScanConfig { // INVALID_ARGUMENT error. int32 max_qps = 3; - // Required. - // The starting URLs from which the scanner finds site pages. - repeated string starting_urls = 4; + // Required. The starting URLs from which the scanner finds site pages. + repeated string starting_urls = 4 [(google.api.field_behavior) = REQUIRED]; // The authentication configuration. If specified, service will use the // authentication configuration during scanning. 
@@ -146,4 +151,7 @@ message ScanConfig { // Set of Cloud Platforms targeted by the scan. If empty, APP_ENGINE will be // used as a default. repeated TargetPlatform target_platforms = 9; + + // Latest ScanRun if available. + ScanRun latest_run = 11; } diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/scan_config_pb2.py b/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/scan_config_pb2.py index 71de8f3294cf..c85206f135ad 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/scan_config_pb2.py +++ b/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/scan_config_pb2.py @@ -15,7 +15,11 @@ _sym_db = _symbol_database.Default() -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 +from google.cloud.websecurityscanner_v1alpha.proto import ( + scan_run_pb2 as google_dot_cloud_dot_websecurityscanner__v1alpha_dot_proto_dot_scan__run__pb2, +) from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 @@ -27,10 +31,12 @@ "\n+com.google.cloud.websecurityscanner.v1alphaB\017ScanConfigProtoP\001ZYgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha;websecurityscanner" ), serialized_pb=_b( - '\n?google/cloud/websecurityscanner_v1alpha/proto/scan_config.proto\x12\'google.cloud.websecurityscanner.v1alpha\x1a\x1cgoogle/api/annotations.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xdc\x08\n\nScanConfig\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12\x0f\n\x07max_qps\x18\x03 \x01(\x05\x12\x15\n\rstarting_urls\x18\x04 \x03(\t\x12Z\n\x0e\x61uthentication\x18\x05 \x01(\x0b\x32\x42.google.cloud.websecurityscanner.v1alpha.ScanConfig.Authentication\x12Q\n\nuser_agent\x18\x06 
\x01(\x0e\x32=.google.cloud.websecurityscanner.v1alpha.ScanConfig.UserAgent\x12\x1a\n\x12\x62lacklist_patterns\x18\x07 \x03(\t\x12N\n\x08schedule\x18\x08 \x01(\x0b\x32<.google.cloud.websecurityscanner.v1alpha.ScanConfig.Schedule\x12\\\n\x10target_platforms\x18\t \x03(\x0e\x32\x42.google.cloud.websecurityscanner.v1alpha.ScanConfig.TargetPlatform\x1a\xf7\x02\n\x0e\x41uthentication\x12j\n\x0egoogle_account\x18\x01 \x01(\x0b\x32P.google.cloud.websecurityscanner.v1alpha.ScanConfig.Authentication.GoogleAccountH\x00\x12j\n\x0e\x63ustom_account\x18\x02 \x01(\x0b\x32P.google.cloud.websecurityscanner.v1alpha.ScanConfig.Authentication.CustomAccountH\x00\x1a\x33\n\rGoogleAccount\x12\x10\n\x08username\x18\x01 \x01(\t\x12\x10\n\x08password\x18\x02 \x01(\t\x1a\x46\n\rCustomAccount\x12\x10\n\x08username\x18\x01 \x01(\t\x12\x10\n\x08password\x18\x02 \x01(\t\x12\x11\n\tlogin_url\x18\x03 \x01(\tB\x10\n\x0e\x61uthentication\x1a]\n\x08Schedule\x12\x31\n\rschedule_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x1e\n\x16interval_duration_days\x18\x02 \x01(\x05"`\n\tUserAgent\x12\x1a\n\x16USER_AGENT_UNSPECIFIED\x10\x00\x12\x10\n\x0c\x43HROME_LINUX\x10\x01\x12\x12\n\x0e\x43HROME_ANDROID\x10\x02\x12\x11\n\rSAFARI_IPHONE\x10\x03"N\n\x0eTargetPlatform\x12\x1f\n\x1bTARGET_PLATFORM_UNSPECIFIED\x10\x00\x12\x0e\n\nAPP_ENGINE\x10\x01\x12\x0b\n\x07\x43OMPUTE\x10\x02\x42\x9b\x01\n+com.google.cloud.websecurityscanner.v1alphaB\x0fScanConfigProtoP\x01ZYgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha;websecurityscannerb\x06proto3' + '\n?google/cloud/websecurityscanner_v1alpha/proto/scan_config.proto\x12\'google.cloud.websecurityscanner.v1alpha\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "websecurityscanner.googleapis.com/ScanRun" + } + ]; + + // Required. The filter expression. The expression must be in the format: // . 
// Supported field: 'finding_type'. // Supported operator: '='. - string filter = 2; + string filter = 2 [(google.api.field_behavior) = REQUIRED]; // A token identifying a page of results to be returned. This should be a // `next_page_token` value returned from a previous List request. @@ -335,11 +392,15 @@ message ListFindingsResponse { // Request for the `ListFindingTypeStats` method. message ListFindingTypeStatsRequest { - // Required. - // The parent resource name, which should be a scan run resource name in the + // Required. The parent resource name, which should be a scan run resource name in the // format // 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}'. - string parent = 1; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "websecurityscanner.googleapis.com/ScanRun" + } + ]; } // Response for the `ListFindingTypeStats` method. diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/web_security_scanner_pb2.py b/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/web_security_scanner_pb2.py index 6cdfadd707ce..dd3772510a9e 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/web_security_scanner_pb2.py +++ b/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/web_security_scanner_pb2.py @@ -16,6 +16,9 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.cloud.websecurityscanner_v1alpha.proto import ( crawled_url_pb2 as google_dot_cloud_dot_websecurityscanner__v1alpha_dot_proto_dot_crawled__url__pb2, ) @@ -43,10 +46,13 @@ 
"\n+com.google.cloud.websecurityscanner.v1alphaB\027WebSecurityScannerProtoP\001ZYgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha;websecurityscanner" ), serialized_pb=_b( - '\nHgoogle/cloud/websecurityscanner_v1alpha/proto/web_security_scanner.proto\x12\'google.cloud.websecurityscanner.v1alpha\x1a\x1cgoogle/api/annotations.proto\x1a?google/cloud/websecurityscanner_v1alpha/proto/crawled_url.proto\x1a;google/cloud/websecurityscanner_v1alpha/proto/finding.proto\x1a\x46google/cloud/websecurityscanner_v1alpha/proto/finding_type_stats.proto\x1a?google/cloud/websecurityscanner_v1alpha/proto/scan_config.proto\x1a/v1alpha/{name=projects/*/scanConfigs/*/scanRuns/*/findings/*}\x12\xd3\x01\n\x0cListFindings\x12<.google.cloud.websecurityscanner.v1alpha.ListFindingsRequest\x1a=.google.cloud.websecurityscanner.v1alpha.ListFindingsResponse"F\x82\xd3\xe4\x93\x02@\x12>/v1alpha/{parent=projects/*/scanConfigs/*/scanRuns/*}/findings\x12\xf3\x01\n\x14ListFindingTypeStats\x12\x44.google.cloud.websecurityscanner.v1alpha.ListFindingTypeStatsRequest\x1a\x45.google.cloud.websecurityscanner.v1alpha.ListFindingTypeStatsResponse"N\x82\xd3\xe4\x93\x02H\x12\x46/v1alpha/{parent=projects/*/scanConfigs/*/scanRuns/*}/findingTypeStatsB\xa3\x01\n+com.google.cloud.websecurityscanner.v1alphaB\x17WebSecurityScannerProtoP\x01ZYgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha;websecurityscannerb\x06proto3' + 
'\nHgoogle/cloud/websecurityscanner_v1alpha/proto/web_security_scanner.proto\x12\'google.cloud.websecurityscanner.v1alpha\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a?google/cloud/websecurityscanner_v1alpha/proto/crawled_url.proto\x1a;google/cloud/websecurityscanner_v1alpha/proto/finding.proto\x1a\x46google/cloud/websecurityscanner_v1alpha/proto/finding_type_stats.proto\x1a?google/cloud/websecurityscanner_v1alpha/proto/scan_config.proto\x1a/v1alpha/{name=projects/*/scanConfigs/*/scanRuns/*/findings/*}\xda\x41\x04name\x12\xe3\x01\n\x0cListFindings\x12<.google.cloud.websecurityscanner.v1alpha.ListFindingsRequest\x1a=.google.cloud.websecurityscanner.v1alpha.ListFindingsResponse"V\x82\xd3\xe4\x93\x02@\x12>/v1alpha/{parent=projects/*/scanConfigs/*/scanRuns/*}/findings\xda\x41\rparent,filter\x12\xfc\x01\n\x14ListFindingTypeStats\x12\x44.google.cloud.websecurityscanner.v1alpha.ListFindingTypeStatsRequest\x1a\x45.google.cloud.websecurityscanner.v1alpha.ListFindingTypeStatsResponse"W\x82\xd3\xe4\x93\x02H\x12\x46/v1alpha/{parent=projects/*/scanConfigs/*/scanRuns/*}/findingTypeStats\xda\x41\x06parent\x1aU\xca\x41!websecurityscanner.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB\xa3\x01\n+com.google.cloud.websecurityscanner.v1alphaB\x17WebSecurityScannerProtoP\x01ZYgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha;websecurityscannerb\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, + google_dot_api_dot_resource__pb2.DESCRIPTOR, google_dot_cloud_dot_websecurityscanner__v1alpha_dot_proto_dot_crawled__url__pb2.DESCRIPTOR, google_dot_cloud_dot_websecurityscanner__v1alpha_dot_proto_dot_finding__pb2.DESCRIPTOR, google_dot_cloud_dot_websecurityscanner__v1alpha_dot_proto_dot_finding__type__stats__pb2.DESCRIPTOR, @@ 
-80,7 +86,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A-\n+cloudresourcemanager.googleapis.com/Project" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -98,7 +106,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -110,8 +118,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=535, - serialized_end=650, + serialized_start=621, + serialized_end=794, ) @@ -137,7 +145,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A.\n,websecurityscanner.googleapis.com/ScanConfig" + ), file=DESCRIPTOR, ) ], @@ -149,8 +159,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=652, - serialized_end=691, + serialized_start=796, + serialized_end=889, ) @@ -176,7 +186,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A.\n,websecurityscanner.googleapis.com/ScanConfig" + ), file=DESCRIPTOR, ) ], @@ -188,8 +200,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=693, - serialized_end=729, + serialized_start=891, + serialized_end=981, ) @@ -215,7 +227,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A-\n+cloudresourcemanager.googleapis.com/Project" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -263,8 +277,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=731, - serialized_end=810, + serialized_start=984, + serialized_end=1116, ) @@ -290,7 +304,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -308,7 
+322,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -320,8 +334,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=813, - serialized_end=961, + serialized_start=1119, + serialized_end=1277, ) @@ -377,8 +391,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=963, - serialized_end=1088, + serialized_start=1279, + serialized_end=1404, ) @@ -404,7 +418,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A.\n,websecurityscanner.googleapis.com/ScanConfig" + ), file=DESCRIPTOR, ) ], @@ -416,8 +432,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1090, - serialized_end=1125, + serialized_start=1406, + serialized_end=1495, ) @@ -443,7 +459,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A+\n)websecurityscanner.googleapis.com/ScanRun" + ), file=DESCRIPTOR, ) ], @@ -455,8 +473,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1127, - serialized_end=1160, + serialized_start=1497, + serialized_end=1581, ) @@ -482,7 +500,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A.\n,websecurityscanner.googleapis.com/ScanConfig" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -530,8 +550,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1162, - serialized_end=1238, + serialized_start=1584, + serialized_end=1714, ) @@ -587,8 +607,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1240, - serialized_end=1356, + serialized_start=1716, + serialized_end=1832, ) @@ -614,7 +634,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + 
serialized_options=_b( + "\340A\002\372A+\n)websecurityscanner.googleapis.com/ScanRun" + ), file=DESCRIPTOR, ) ], @@ -626,8 +648,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1358, - serialized_end=1392, + serialized_start=1834, + serialized_end=1919, ) @@ -653,7 +675,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A+\n)websecurityscanner.googleapis.com/ScanRun" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -701,8 +725,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1394, - serialized_end=1473, + serialized_start=1922, + serialized_end=2052, ) @@ -758,8 +782,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1475, - serialized_end=1600, + serialized_start=2054, + serialized_end=2179, ) @@ -785,7 +809,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A+\n)websecurityscanner.googleapis.com/Finding" + ), file=DESCRIPTOR, ) ], @@ -797,8 +823,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1602, - serialized_end=1635, + serialized_start=2181, + serialized_end=2265, ) @@ -824,7 +850,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A+\n)websecurityscanner.googleapis.com/ScanRun" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -842,7 +870,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -890,8 +918,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1637, - serialized_end=1729, + serialized_start=2268, + serialized_end=2416, ) @@ -947,8 +975,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1731, - serialized_end=1846, 
+ serialized_start=2418, + serialized_end=2533, ) @@ -974,7 +1002,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A+\n)websecurityscanner.googleapis.com/ScanRun" + ), file=DESCRIPTOR, ) ], @@ -986,8 +1016,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1848, - serialized_end=1893, + serialized_start=2535, + serialized_end=2631, ) @@ -1025,8 +1055,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1895, - serialized_end=2012, + serialized_start=2633, + serialized_end=2750, ) _CREATESCANCONFIGREQUEST.fields_by_name[ @@ -1422,8 +1452,9 @@ resource name in the format 'projects/{projectId}/scanConfigs/ {scanConfigId}/scanRuns/{scanRunId}'. filter: - The filter expression. The expression must be in the format: . - Supported field: 'finding\_type'. Supported operator: '='. + Required. The filter expression. The expression must be in the + format: . Supported field: 'finding\_type'. Supported + operator: '='. page_token: A token identifying a page of results to be returned. 
This should be a ``next_page_token`` value returned from a previous @@ -1500,15 +1531,33 @@ DESCRIPTOR._options = None +_CREATESCANCONFIGREQUEST.fields_by_name["parent"]._options = None +_CREATESCANCONFIGREQUEST.fields_by_name["scan_config"]._options = None +_DELETESCANCONFIGREQUEST.fields_by_name["name"]._options = None +_GETSCANCONFIGREQUEST.fields_by_name["name"]._options = None +_LISTSCANCONFIGSREQUEST.fields_by_name["parent"]._options = None +_UPDATESCANCONFIGREQUEST.fields_by_name["scan_config"]._options = None +_UPDATESCANCONFIGREQUEST.fields_by_name["update_mask"]._options = None +_STARTSCANRUNREQUEST.fields_by_name["name"]._options = None +_GETSCANRUNREQUEST.fields_by_name["name"]._options = None +_LISTSCANRUNSREQUEST.fields_by_name["parent"]._options = None +_STOPSCANRUNREQUEST.fields_by_name["name"]._options = None +_LISTCRAWLEDURLSREQUEST.fields_by_name["parent"]._options = None +_GETFINDINGREQUEST.fields_by_name["name"]._options = None +_LISTFINDINGSREQUEST.fields_by_name["parent"]._options = None +_LISTFINDINGSREQUEST.fields_by_name["filter"]._options = None +_LISTFINDINGTYPESTATSREQUEST.fields_by_name["parent"]._options = None _WEBSECURITYSCANNER = _descriptor.ServiceDescriptor( name="WebSecurityScanner", full_name="google.cloud.websecurityscanner.v1alpha.WebSecurityScanner", file=DESCRIPTOR, index=0, - serialized_options=None, - serialized_start=2015, - serialized_end=4655, + serialized_options=_b( + "\312A!websecurityscanner.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" + ), + serialized_start=2753, + serialized_end=5621, methods=[ _descriptor.MethodDescriptor( name="CreateScanConfig", @@ -1518,7 +1567,7 @@ input_type=_CREATESCANCONFIGREQUEST, output_type=google_dot_cloud_dot_websecurityscanner__v1alpha_dot_proto_dot_scan__config__pb2._SCANCONFIG, serialized_options=_b( - '\202\323\344\223\0027"(/v1alpha/{parent=projects/*}/scanConfigs:\013scan_config' + 
'\202\323\344\223\0027"(/v1alpha/{parent=projects/*}/scanConfigs:\013scan_config\332A\022parent,scan_config' ), ), _descriptor.MethodDescriptor( @@ -1529,7 +1578,7 @@ input_type=_DELETESCANCONFIGREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, serialized_options=_b( - "\202\323\344\223\002**(/v1alpha/{name=projects/*/scanConfigs/*}" + "\202\323\344\223\002**(/v1alpha/{name=projects/*/scanConfigs/*}\332A\004name" ), ), _descriptor.MethodDescriptor( @@ -1540,7 +1589,7 @@ input_type=_GETSCANCONFIGREQUEST, output_type=google_dot_cloud_dot_websecurityscanner__v1alpha_dot_proto_dot_scan__config__pb2._SCANCONFIG, serialized_options=_b( - "\202\323\344\223\002*\022(/v1alpha/{name=projects/*/scanConfigs/*}" + "\202\323\344\223\002*\022(/v1alpha/{name=projects/*/scanConfigs/*}\332A\004name" ), ), _descriptor.MethodDescriptor( @@ -1551,7 +1600,7 @@ input_type=_LISTSCANCONFIGSREQUEST, output_type=_LISTSCANCONFIGSRESPONSE, serialized_options=_b( - "\202\323\344\223\002*\022(/v1alpha/{parent=projects/*}/scanConfigs" + "\202\323\344\223\002*\022(/v1alpha/{parent=projects/*}/scanConfigs\332A\006parent" ), ), _descriptor.MethodDescriptor( @@ -1562,7 +1611,7 @@ input_type=_UPDATESCANCONFIGREQUEST, output_type=google_dot_cloud_dot_websecurityscanner__v1alpha_dot_proto_dot_scan__config__pb2._SCANCONFIG, serialized_options=_b( - "\202\323\344\223\002C24/v1alpha/{scan_config.name=projects/*/scanConfigs/*}:\013scan_config" + "\202\323\344\223\002C24/v1alpha/{scan_config.name=projects/*/scanConfigs/*}:\013scan_config\332A\027scan_config,update_mask" ), ), _descriptor.MethodDescriptor( @@ -1573,7 +1622,7 @@ input_type=_STARTSCANRUNREQUEST, output_type=google_dot_cloud_dot_websecurityscanner__v1alpha_dot_proto_dot_scan__run__pb2._SCANRUN, serialized_options=_b( - '\202\323\344\223\0023"./v1alpha/{name=projects/*/scanConfigs/*}:start:\001*' + '\202\323\344\223\0023"./v1alpha/{name=projects/*/scanConfigs/*}:start:\001*\332A\004name' ), ), _descriptor.MethodDescriptor( @@ 
-1584,7 +1633,7 @@ input_type=_GETSCANRUNREQUEST, output_type=google_dot_cloud_dot_websecurityscanner__v1alpha_dot_proto_dot_scan__run__pb2._SCANRUN, serialized_options=_b( - "\202\323\344\223\0025\0223/v1alpha/{name=projects/*/scanConfigs/*/scanRuns/*}" + "\202\323\344\223\0025\0223/v1alpha/{name=projects/*/scanConfigs/*/scanRuns/*}\332A\004name" ), ), _descriptor.MethodDescriptor( @@ -1595,7 +1644,7 @@ input_type=_LISTSCANRUNSREQUEST, output_type=_LISTSCANRUNSRESPONSE, serialized_options=_b( - "\202\323\344\223\0025\0223/v1alpha/{parent=projects/*/scanConfigs/*}/scanRuns" + "\202\323\344\223\0025\0223/v1alpha/{parent=projects/*/scanConfigs/*}/scanRuns\332A\006parent" ), ), _descriptor.MethodDescriptor( @@ -1606,7 +1655,7 @@ input_type=_STOPSCANRUNREQUEST, output_type=google_dot_cloud_dot_websecurityscanner__v1alpha_dot_proto_dot_scan__run__pb2._SCANRUN, serialized_options=_b( - '\202\323\344\223\002="8/v1alpha/{name=projects/*/scanConfigs/*/scanRuns/*}:stop:\001*' + '\202\323\344\223\002="8/v1alpha/{name=projects/*/scanConfigs/*/scanRuns/*}:stop:\001*\332A\004name' ), ), _descriptor.MethodDescriptor( @@ -1617,7 +1666,7 @@ input_type=_LISTCRAWLEDURLSREQUEST, output_type=_LISTCRAWLEDURLSRESPONSE, serialized_options=_b( - "\202\323\344\223\002C\022A/v1alpha/{parent=projects/*/scanConfigs/*/scanRuns/*}/crawledUrls" + "\202\323\344\223\002C\022A/v1alpha/{parent=projects/*/scanConfigs/*/scanRuns/*}/crawledUrls\332A\006parent" ), ), _descriptor.MethodDescriptor( @@ -1628,7 +1677,7 @@ input_type=_GETFINDINGREQUEST, output_type=google_dot_cloud_dot_websecurityscanner__v1alpha_dot_proto_dot_finding__pb2._FINDING, serialized_options=_b( - "\202\323\344\223\002@\022>/v1alpha/{name=projects/*/scanConfigs/*/scanRuns/*/findings/*}" + "\202\323\344\223\002@\022>/v1alpha/{name=projects/*/scanConfigs/*/scanRuns/*/findings/*}\332A\004name" ), ), _descriptor.MethodDescriptor( @@ -1639,7 +1688,7 @@ input_type=_LISTFINDINGSREQUEST, output_type=_LISTFINDINGSRESPONSE, 
serialized_options=_b( - "\202\323\344\223\002@\022>/v1alpha/{parent=projects/*/scanConfigs/*/scanRuns/*}/findings" + "\202\323\344\223\002@\022>/v1alpha/{parent=projects/*/scanConfigs/*/scanRuns/*}/findings\332A\rparent,filter" ), ), _descriptor.MethodDescriptor( @@ -1650,7 +1699,7 @@ input_type=_LISTFINDINGTYPESTATSREQUEST, output_type=_LISTFINDINGTYPESTATSRESPONSE, serialized_options=_b( - "\202\323\344\223\002H\022F/v1alpha/{parent=projects/*/scanConfigs/*/scanRuns/*}/findingTypeStats" + "\202\323\344\223\002H\022F/v1alpha/{parent=projects/*/scanConfigs/*/scanRuns/*}/findingTypeStats\332A\006parent" ), ), ], diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1beta/gapic/enums.py b/websecurityscanner/google/cloud/websecurityscanner_v1beta/gapic/enums.py index 38dde89453e2..244c5e058ef8 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1beta/gapic/enums.py +++ b/websecurityscanner/google/cloud/websecurityscanner_v1beta/gapic/enums.py @@ -88,7 +88,7 @@ class Code(enum.IntEnum): """ Output only. Defines an error reason code. - Next id: 43 + Next id: 44 Attributes: CODE_UNSPECIFIED (int): There is no error. @@ -121,6 +121,7 @@ class Code(enum.IntEnum): FINDING_TYPE_UNSPECIFIED (int): Finding type value is not specified in the list findings request. FORBIDDEN_TO_SCAN_COMPUTE (int): Scan targets Compute Engine, yet current project was not whitelisted for Google Compute Engine Scanning Alpha access. + FORBIDDEN_UPDATE_TO_MANAGED_SCAN (int): User tries to update managed scan MALFORMED_FILTER (int): The supplied filter is malformed. For example, it can not be parsed, does not have a filter type in expression, or the same filter type appears more than once. 
@@ -171,6 +172,7 @@ class Code(enum.IntEnum): FAILED_TO_AUTHENTICATE_TO_TARGET = 19 FINDING_TYPE_UNSPECIFIED = 20 FORBIDDEN_TO_SCAN_COMPUTE = 21 + FORBIDDEN_UPDATE_TO_MANAGED_SCAN = 43 MALFORMED_FILTER = 22 MALFORMED_RESOURCE_NAME = 23 PROJECT_INACTIVE = 24 @@ -248,7 +250,7 @@ class Code(enum.IntEnum): authentication or some other page outside of the scan scope. TOO_MANY_HTTP_ERRORS (int): Indicates that a scan encountered numerous errors from the web site pages. When available, most\_common\_http\_error\_code field indicates - the the most common HTTP error code encountered during the scan. + the most common HTTP error code encountered during the scan. """ CODE_UNSPECIFIED = 0 @@ -265,7 +267,7 @@ class Code(enum.IntEnum): """ Output only. Defines a warning message code. - Next id: 5 + Next id: 6 Attributes: CODE_UNSPECIFIED (int): Default value is never used. diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1beta/gapic/web_security_scanner_client.py b/websecurityscanner/google/cloud/websecurityscanner_v1beta/gapic/web_security_scanner_client.py index fc1dc384dd9b..8529b02bf489 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1beta/gapic/web_security_scanner_client.py +++ b/websecurityscanner/google/cloud/websecurityscanner_v1beta/gapic/web_security_scanner_client.py @@ -260,11 +260,9 @@ def create_scan_config( >>> response = client.create_scan_config(parent, scan_config) Args: - parent (str): Required. - The parent resource name where the scan is created, which should be a + parent (str): Required. The parent resource name where the scan is created, which should be a project resource name in the format 'projects/{projectId}'. - scan_config (Union[dict, ~google.cloud.websecurityscanner_v1beta.types.ScanConfig]): Required. - The ScanConfig to be created. + scan_config (Union[dict, ~google.cloud.websecurityscanner_v1beta.types.ScanConfig]): Required. The ScanConfig to be created. 
If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.websecurityscanner_v1beta.types.ScanConfig` @@ -338,8 +336,7 @@ def delete_scan_config( >>> client.delete_scan_config(name) Args: - name (str): Required. - The resource name of the ScanConfig to be deleted. The name follows the + name (str): Required. The resource name of the ScanConfig to be deleted. The name follows the format of 'projects/{projectId}/scanConfigs/{scanConfigId}'. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will @@ -406,8 +403,7 @@ def get_scan_config( >>> response = client.get_scan_config(name) Args: - name (str): Required. - The resource name of the ScanConfig to be returned. The name follows the + name (str): Required. The resource name of the ScanConfig to be returned. The name follows the format of 'projects/{projectId}/scanConfigs/{scanConfigId}'. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will @@ -490,8 +486,7 @@ def list_scan_configs( ... pass Args: - parent (str): Required. - The parent resource name, which should be a project resource name in the + parent (str): Required. The parent resource name, which should be a project resource name in the format 'projects/{projectId}'. page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per- @@ -587,8 +582,7 @@ def update_scan_config( >>> response = client.update_scan_config(scan_config, update_mask) Args: - scan_config (Union[dict, ~google.cloud.websecurityscanner_v1beta.types.ScanConfig]): Required. - The ScanConfig to be updated. The name field must be set to identify the + scan_config (Union[dict, ~google.cloud.websecurityscanner_v1beta.types.ScanConfig]): Required. The ScanConfig to be updated. The name field must be set to identify the resource to be updated. 
The values of fields not covered by the mask will be ignored. @@ -670,8 +664,7 @@ def start_scan_run( >>> response = client.start_scan_run(name) Args: - name (str): Required. - The resource name of the ScanConfig to be used. The name follows the + name (str): Required. The resource name of the ScanConfig to be used. The name follows the format of 'projects/{projectId}/scanConfigs/{scanConfigId}'. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will @@ -741,8 +734,7 @@ def get_scan_run( >>> response = client.get_scan_run(name) Args: - name (str): Required. - The resource name of the ScanRun to be returned. The name follows the + name (str): Required. The resource name of the ScanRun to be returned. The name follows the format of 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}'. retry (Optional[google.api_core.retry.Retry]): A retry object used @@ -827,8 +819,7 @@ def list_scan_runs( ... pass Args: - parent (str): Required. - The parent resource name, which should be a scan resource name in the + parent (str): Required. The parent resource name, which should be a scan resource name in the format 'projects/{projectId}/scanConfigs/{scanConfigId}'. page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per- @@ -919,8 +910,7 @@ def stop_scan_run( >>> response = client.stop_scan_run(name) Args: - name (str): Required. - The resource name of the ScanRun to be stopped. The name follows the + name (str): Required. The resource name of the ScanRun to be stopped. The name follows the format of 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}'. retry (Optional[google.api_core.retry.Retry]): A retry object used @@ -1004,8 +994,7 @@ def list_crawled_urls( ... pass Args: - parent (str): Required. - The parent resource name, which should be a scan run resource name in the + parent (str): Required. 
The parent resource name, which should be a scan run resource name in the format 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}'. page_size (int): The maximum number of resources contained in the @@ -1097,8 +1086,7 @@ def get_finding( >>> response = client.get_finding(name) Args: - name (str): Required. - The resource name of the Finding to be returned. The name follows the + name (str): Required. The resource name of the Finding to be returned. The name follows the format of 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}/findings/{findingId}'. retry (Optional[google.api_core.retry.Retry]): A retry object used @@ -1186,12 +1174,11 @@ def list_findings( ... pass Args: - parent (str): Required. - The parent resource name, which should be a scan run resource name in the + parent (str): Required. The parent resource name, which should be a scan run resource name in the format 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}'. - filter_ (str): The filter expression. The expression must be in the format: . Supported - field: 'finding\_type'. Supported operator: '='. + filter_ (str): Required. The filter expression. The expression must be in the format: . + Supported field: 'finding\_type'. Supported operator: '='. page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per- resource, this parameter does not affect the return value. If page @@ -1281,8 +1268,7 @@ def list_finding_type_stats( >>> response = client.list_finding_type_stats(parent) Args: - parent (str): Required. - The parent resource name, which should be a scan run resource name in the + parent (str): Required. The parent resource name, which should be a scan run resource name in the format 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}'. 
retry (Optional[google.api_core.retry.Retry]): A retry object used diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1beta/gapic/web_security_scanner_client_config.py b/websecurityscanner/google/cloud/websecurityscanner_v1beta/gapic/web_security_scanner_client_config.py index d447761e5f4c..61dafbb68866 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1beta/gapic/web_security_scanner_client_config.py +++ b/websecurityscanner/google/cloud/websecurityscanner_v1beta/gapic/web_security_scanner_client_config.py @@ -24,7 +24,7 @@ }, "DeleteScanConfig": { "timeout_millis": 60000, - "retry_codes_name": "idempotent", + "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, "GetScanConfig": { diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/crawled_url.proto b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/crawled_url.proto index 92a0dc663856..9316f8b2a269 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/crawled_url.proto +++ b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/crawled_url.proto @@ -17,8 +17,6 @@ syntax = "proto3"; package google.cloud.websecurityscanner.v1beta; -import "google/api/annotations.proto"; - option go_package = "google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta;websecurityscanner"; option java_multiple_files = true; option java_outer_classname = "CrawledUrlProto"; @@ -29,16 +27,13 @@ option php_namespace = "Google\\Cloud\\WebSecurityScanner\\V1beta"; // Security Scanner Service crawls the web applications, following all links // within the scope of sites, to find the URLs to test against. message CrawledUrl { - // Output only. // The http method of the request that was used to visit the URL, in // uppercase. string http_method = 1; - // Output only. // The URL that was crawled. string url = 2; - // Output only. // The body of the request that was used to visit the URL. 
string body = 3; } diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/crawled_url_pb2.py b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/crawled_url_pb2.py index 4fd988ae3ec1..2c38599e41c7 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/crawled_url_pb2.py +++ b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/crawled_url_pb2.py @@ -15,9 +15,6 @@ _sym_db = _symbol_database.Default() -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 - - DESCRIPTOR = _descriptor.FileDescriptor( name="google/cloud/websecurityscanner_v1beta/proto/crawled_url.proto", package="google.cloud.websecurityscanner.v1beta", @@ -26,9 +23,8 @@ "\n*com.google.cloud.websecurityscanner.v1betaB\017CrawledUrlProtoP\001ZXgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta;websecurityscanner\312\002&Google\\Cloud\\WebSecurityScanner\\V1beta" ), serialized_pb=_b( - '\n>google/cloud/websecurityscanner_v1beta/proto/crawled_url.proto\x12&google.cloud.websecurityscanner.v1beta\x1a\x1cgoogle/api/annotations.proto"<\n\nCrawledUrl\x12\x13\n\x0bhttp_method\x18\x01 \x01(\t\x12\x0b\n\x03url\x18\x02 \x01(\t\x12\x0c\n\x04\x62ody\x18\x03 \x01(\tB\xc2\x01\n*com.google.cloud.websecurityscanner.v1betaB\x0f\x43rawledUrlProtoP\x01ZXgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta;websecurityscanner\xca\x02&Google\\Cloud\\WebSecurityScanner\\V1betab\x06proto3' + '\n>google/cloud/websecurityscanner_v1beta/proto/crawled_url.proto\x12&google.cloud.websecurityscanner.v1beta"<\n\nCrawledUrl\x12\x13\n\x0bhttp_method\x18\x01 \x01(\t\x12\x0b\n\x03url\x18\x02 \x01(\t\x12\x0c\n\x04\x62ody\x18\x03 \x01(\tB\xc2\x01\n*com.google.cloud.websecurityscanner.v1betaB\x0f\x43rawledUrlProtoP\x01ZXgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta;websecurityscanner\xca\x02&Google\\Cloud\\WebSecurityScanner\\V1betab\x06proto3' ), - 
dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR], ) @@ -102,8 +98,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=136, - serialized_end=196, + serialized_start=106, + serialized_end=166, ) DESCRIPTOR.message_types_by_name["CrawledUrl"] = _CRAWLEDURL @@ -123,13 +119,12 @@ Attributes: http_method: - Output only. The http method of the request that was used to - visit the URL, in uppercase. + The http method of the request that was used to visit the URL, + in uppercase. url: - Output only. The URL that was crawled. + The URL that was crawled. body: - Output only. The body of the request that was used to visit - the URL. + The body of the request that was used to visit the URL. """, # @@protoc_insertion_point(class_scope:google.cloud.websecurityscanner.v1beta.CrawledUrl) ), diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/finding.proto b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/finding.proto index 5e25abb5d061..5f86c9c71d05 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/finding.proto +++ b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/finding.proto @@ -17,7 +17,7 @@ syntax = "proto3"; package google.cloud.websecurityscanner.v1beta; -import "google/api/annotations.proto"; +import "google/api/resource.proto"; import "google/cloud/websecurityscanner/v1beta/finding_addon.proto"; option go_package = "google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta;websecurityscanner"; @@ -29,79 +29,68 @@ option php_namespace = "Google\\Cloud\\WebSecurityScanner\\V1beta"; // A Finding resource represents a vulnerability instance identified during a // ScanRun. message Finding { - // Output only. + option (google.api.resource) = { + type: "websecurityscanner.googleapis.com/Finding" + pattern: "projects/{project}/scanConfigs/{scan_config}/scanRuns/{scan_run}/findings/{finding}" + }; + // The resource name of the Finding. 
The name follows the format of // 'projects/{projectId}/scanConfigs/{scanConfigId}/scanruns/{scanRunId}/findings/{findingId}'. // The finding IDs are generated by the system. string name = 1; - // Output only. // The type of the Finding. // Detailed and up-to-date information on findings can be found here: // https://cloud.google.com/security-scanner/docs/scan-result-details string finding_type = 2; - // Output only. // The http method of the request that triggered the vulnerability, in // uppercase. string http_method = 3; - // Output only. // The URL produced by the server-side fuzzer and used in the request that // triggered the vulnerability. string fuzzed_url = 4; - // Output only. // The body of the request that triggered the vulnerability. string body = 5; - // Output only. // The description of the vulnerability. string description = 6; - // Output only. // The URL containing human-readable payload that user can leverage to // reproduce the vulnerability. string reproduction_url = 7; - // Output only. // If the vulnerability was originated from nested IFrame, the immediate // parent IFrame is reported. string frame_url = 8; - // Output only. // The URL where the browser lands when the vulnerability is detected. string final_url = 9; - // Output only. // The tracking ID uniquely identifies a vulnerability instance across // multiple ScanRuns. string tracking_id = 10; - // Output only. // An addon containing information reported for a vulnerability with an HTML // form, if any. Form form = 16; - // Output only. // An addon containing information about outdated libraries. OutdatedLibrary outdated_library = 11; - // Output only. // An addon containing detailed information regarding any resource causing the // vulnerability such as JavaScript sources, image, audio files, etc. ViolatingResource violating_resource = 12; - // Output only. // An addon containing information about vulnerable or missing HTTP headers. 
VulnerableHeaders vulnerable_headers = 15; - // Output only. // An addon containing information about request parameters which were found // to be vulnerable. VulnerableParameters vulnerable_parameters = 13; - // Output only. // An addon containing information reported for an XSS, if any. Xss xss = 14; } diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/finding_addon.proto b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/finding_addon.proto index d2759b831909..ea5989a13dae 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/finding_addon.proto +++ b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/finding_addon.proto @@ -17,8 +17,6 @@ syntax = "proto3"; package google.cloud.websecurityscanner.v1beta; -import "google/api/annotations.proto"; - option go_package = "google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta;websecurityscanner"; option java_multiple_files = true; option java_outer_classname = "FindingAddonProto"; diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/finding_addon_pb2.py b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/finding_addon_pb2.py index c029fbb15ca3..ee10ce7f056a 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/finding_addon_pb2.py +++ b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/finding_addon_pb2.py @@ -15,9 +15,6 @@ _sym_db = _symbol_database.Default() -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 - - DESCRIPTOR = _descriptor.FileDescriptor( name="google/cloud/websecurityscanner_v1beta/proto/finding_addon.proto", package="google.cloud.websecurityscanner.v1beta", @@ -26,9 +23,8 @@ "\n*com.google.cloud.websecurityscanner.v1betaB\021FindingAddonProtoP\001ZXgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta;websecurityscanner\312\002&Google\\Cloud\\WebSecurityScanner\\V1beta" ), serialized_pb=_b( - 
'\n@google/cloud/websecurityscanner_v1beta/proto/finding_addon.proto\x12&google.cloud.websecurityscanner.v1beta\x1a\x1cgoogle/api/annotations.proto"*\n\x04\x46orm\x12\x12\n\naction_uri\x18\x01 \x01(\t\x12\x0e\n\x06\x66ields\x18\x02 \x03(\t"Q\n\x0fOutdatedLibrary\x12\x14\n\x0clibrary_name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\x12\x17\n\x0flearn_more_urls\x18\x03 \x03(\t"?\n\x11ViolatingResource\x12\x14\n\x0c\x63ontent_type\x18\x01 \x01(\t\x12\x14\n\x0cresource_url\x18\x02 \x01(\t"/\n\x14VulnerableParameters\x12\x17\n\x0fparameter_names\x18\x01 \x03(\t"\xe8\x01\n\x11VulnerableHeaders\x12Q\n\x07headers\x18\x01 \x03(\x0b\x32@.google.cloud.websecurityscanner.v1beta.VulnerableHeaders.Header\x12Y\n\x0fmissing_headers\x18\x02 \x03(\x0b\x32@.google.cloud.websecurityscanner.v1beta.VulnerableHeaders.Header\x1a%\n\x06Header\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t"2\n\x03Xss\x12\x14\n\x0cstack_traces\x18\x01 \x03(\t\x12\x15\n\rerror_message\x18\x02 \x01(\tB\xc4\x01\n*com.google.cloud.websecurityscanner.v1betaB\x11\x46indingAddonProtoP\x01ZXgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta;websecurityscanner\xca\x02&Google\\Cloud\\WebSecurityScanner\\V1betab\x06proto3' + '\n@google/cloud/websecurityscanner_v1beta/proto/finding_addon.proto\x12&google.cloud.websecurityscanner.v1beta"*\n\x04\x46orm\x12\x12\n\naction_uri\x18\x01 \x01(\t\x12\x0e\n\x06\x66ields\x18\x02 \x03(\t"Q\n\x0fOutdatedLibrary\x12\x14\n\x0clibrary_name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\x12\x17\n\x0flearn_more_urls\x18\x03 \x03(\t"?\n\x11ViolatingResource\x12\x14\n\x0c\x63ontent_type\x18\x01 \x01(\t\x12\x14\n\x0cresource_url\x18\x02 \x01(\t"/\n\x14VulnerableParameters\x12\x17\n\x0fparameter_names\x18\x01 \x03(\t"\xe8\x01\n\x11VulnerableHeaders\x12Q\n\x07headers\x18\x01 \x03(\x0b\x32@.google.cloud.websecurityscanner.v1beta.VulnerableHeaders.Header\x12Y\n\x0fmissing_headers\x18\x02 
\x03(\x0b\x32@.google.cloud.websecurityscanner.v1beta.VulnerableHeaders.Header\x1a%\n\x06Header\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t"2\n\x03Xss\x12\x14\n\x0cstack_traces\x18\x01 \x03(\t\x12\x15\n\rerror_message\x18\x02 \x01(\tB\xc4\x01\n*com.google.cloud.websecurityscanner.v1betaB\x11\x46indingAddonProtoP\x01ZXgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta;websecurityscanner\xca\x02&Google\\Cloud\\WebSecurityScanner\\V1betab\x06proto3' ), - dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR], ) @@ -84,8 +80,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=138, - serialized_end=180, + serialized_start=108, + serialized_end=150, ) @@ -159,8 +155,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=182, - serialized_end=263, + serialized_start=152, + serialized_end=233, ) @@ -216,8 +212,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=265, - serialized_end=328, + serialized_start=235, + serialized_end=298, ) @@ -255,8 +251,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=330, - serialized_end=377, + serialized_start=300, + serialized_end=347, ) @@ -312,8 +308,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=575, - serialized_end=612, + serialized_start=545, + serialized_end=582, ) _VULNERABLEHEADERS = _descriptor.Descriptor( @@ -368,8 +364,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=380, - serialized_end=612, + serialized_start=350, + serialized_end=582, ) @@ -425,8 +421,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=614, - serialized_end=664, + serialized_start=584, + serialized_end=634, ) _VULNERABLEHEADERS_HEADER.containing_type = _VULNERABLEHEADERS diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/finding_pb2.py b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/finding_pb2.py index 
385aac2919bb..1d27a8a23088 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/finding_pb2.py +++ b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/finding_pb2.py @@ -15,7 +15,7 @@ _sym_db = _symbol_database.Default() -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.cloud.websecurityscanner_v1beta.proto import ( finding_addon_pb2 as google_dot_cloud_dot_websecurityscanner__v1beta_dot_proto_dot_finding__addon__pb2, ) @@ -29,10 +29,10 @@ "\n*com.google.cloud.websecurityscanner.v1betaB\014FindingProtoP\001ZXgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta;websecurityscanner\312\002&Google\\Cloud\\WebSecurityScanner\\V1beta" ), serialized_pb=_b( - '\n:google/cloud/websecurityscanner_v1beta/proto/finding.proto\x12&google.cloud.websecurityscanner.v1beta\x1a\x1cgoogle/api/annotations.proto\x1a@google/cloud/websecurityscanner_v1beta/proto/finding_addon.proto"\xa2\x05\n\x07\x46inding\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0c\x66inding_type\x18\x02 \x01(\t\x12\x13\n\x0bhttp_method\x18\x03 \x01(\t\x12\x12\n\nfuzzed_url\x18\x04 \x01(\t\x12\x0c\n\x04\x62ody\x18\x05 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x06 \x01(\t\x12\x18\n\x10reproduction_url\x18\x07 \x01(\t\x12\x11\n\tframe_url\x18\x08 \x01(\t\x12\x11\n\tfinal_url\x18\t \x01(\t\x12\x13\n\x0btracking_id\x18\n \x01(\t\x12:\n\x04\x66orm\x18\x10 \x01(\x0b\x32,.google.cloud.websecurityscanner.v1beta.Form\x12Q\n\x10outdated_library\x18\x0b \x01(\x0b\x32\x37.google.cloud.websecurityscanner.v1beta.OutdatedLibrary\x12U\n\x12violating_resource\x18\x0c \x01(\x0b\x32\x39.google.cloud.websecurityscanner.v1beta.ViolatingResource\x12U\n\x12vulnerable_headers\x18\x0f \x01(\x0b\x32\x39.google.cloud.websecurityscanner.v1beta.VulnerableHeaders\x12[\n\x15vulnerable_parameters\x18\r 
\x01(\x0b\x32<.google.cloud.websecurityscanner.v1beta.VulnerableParameters\x12\x38\n\x03xss\x18\x0e \x01(\x0b\x32+.google.cloud.websecurityscanner.v1beta.XssB\xbf\x01\n*com.google.cloud.websecurityscanner.v1betaB\x0c\x46indingProtoP\x01ZXgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta;websecurityscanner\xca\x02&Google\\Cloud\\WebSecurityScanner\\V1betab\x06proto3' + '\n:google/cloud/websecurityscanner_v1beta/proto/finding.proto\x12&google.cloud.websecurityscanner.v1beta\x1a\x19google/api/resource.proto\x1a@google/cloud/websecurityscanner_v1beta/proto/finding_addon.proto"\xa9\x06\n\x07\x46inding\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0c\x66inding_type\x18\x02 \x01(\t\x12\x13\n\x0bhttp_method\x18\x03 \x01(\t\x12\x12\n\nfuzzed_url\x18\x04 \x01(\t\x12\x0c\n\x04\x62ody\x18\x05 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x06 \x01(\t\x12\x18\n\x10reproduction_url\x18\x07 \x01(\t\x12\x11\n\tframe_url\x18\x08 \x01(\t\x12\x11\n\tfinal_url\x18\t \x01(\t\x12\x13\n\x0btracking_id\x18\n \x01(\t\x12:\n\x04\x66orm\x18\x10 \x01(\x0b\x32,.google.cloud.websecurityscanner.v1beta.Form\x12Q\n\x10outdated_library\x18\x0b \x01(\x0b\x32\x37.google.cloud.websecurityscanner.v1beta.OutdatedLibrary\x12U\n\x12violating_resource\x18\x0c \x01(\x0b\x32\x39.google.cloud.websecurityscanner.v1beta.ViolatingResource\x12U\n\x12vulnerable_headers\x18\x0f \x01(\x0b\x32\x39.google.cloud.websecurityscanner.v1beta.VulnerableHeaders\x12[\n\x15vulnerable_parameters\x18\r \x01(\x0b\x32<.google.cloud.websecurityscanner.v1beta.VulnerableParameters\x12\x38\n\x03xss\x18\x0e 
\x01(\x0b\x32+.google.cloud.websecurityscanner.v1beta.Xss:\x84\x01\xea\x41\x80\x01\n)websecurityscanner.googleapis.com/Finding\x12Sprojects/{project}/scanConfigs/{scan_config}/scanRuns/{scan_run}/findings/{finding}B\xbf\x01\n*com.google.cloud.websecurityscanner.v1betaB\x0c\x46indingProtoP\x01ZXgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta;websecurityscanner\xca\x02&Google\\Cloud\\WebSecurityScanner\\V1betab\x06proto3' ), dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_resource__pb2.DESCRIPTOR, google_dot_cloud_dot_websecurityscanner__v1beta_dot_proto_dot_finding__addon__pb2.DESCRIPTOR, ], ) @@ -337,13 +337,15 @@ extensions=[], nested_types=[], enum_types=[], - serialized_options=None, + serialized_options=_b( + "\352A\200\001\n)websecurityscanner.googleapis.com/Finding\022Sprojects/{project}/scanConfigs/{scan_config}/scanRuns/{scan_run}/findings/{finding}" + ), is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=199, - serialized_end=873, + serialized_start=196, + serialized_end=1005, ) _FINDING.fields_by_name[ @@ -391,57 +393,54 @@ Attributes: name: - Output only. The resource name of the Finding. The name - follows the format of 'projects/{projectId}/scanConfigs/{scanC - onfigId}/scanruns/{scanRunId}/findings/{findingId}'. The - finding IDs are generated by the system. + The resource name of the Finding. The name follows the format + of 'projects/{projectId}/scanConfigs/{scanConfigId}/scanruns/{ + scanRunId}/findings/{findingId}'. The finding IDs are + generated by the system. finding_type: - Output only. The type of the Finding. Detailed and up-to-date - information on findings can be found here: + The type of the Finding. Detailed and up-to-date information + on findings can be found here: https://cloud.google.com/security-scanner/docs/scan-result- details http_method: - Output only. 
The http method of the request that triggered the + The http method of the request that triggered the vulnerability, in uppercase. fuzzed_url: - Output only. The URL produced by the server-side fuzzer and - used in the request that triggered the vulnerability. + The URL produced by the server-side fuzzer and used in the + request that triggered the vulnerability. body: - Output only. The body of the request that triggered the - vulnerability. + The body of the request that triggered the vulnerability. description: - Output only. The description of the vulnerability. + The description of the vulnerability. reproduction_url: - Output only. The URL containing human-readable payload that - user can leverage to reproduce the vulnerability. + The URL containing human-readable payload that user can + leverage to reproduce the vulnerability. frame_url: - Output only. If the vulnerability was originated from nested - IFrame, the immediate parent IFrame is reported. + If the vulnerability was originated from nested IFrame, the + immediate parent IFrame is reported. final_url: - Output only. The URL where the browser lands when the - vulnerability is detected. + The URL where the browser lands when the vulnerability is + detected. tracking_id: - Output only. The tracking ID uniquely identifies a - vulnerability instance across multiple ScanRuns. + The tracking ID uniquely identifies a vulnerability instance + across multiple ScanRuns. form: - Output only. An addon containing information reported for a - vulnerability with an HTML form, if any. + An addon containing information reported for a vulnerability + with an HTML form, if any. outdated_library: - Output only. An addon containing information about outdated - libraries. + An addon containing information about outdated libraries. violating_resource: - Output only. An addon containing detailed information - regarding any resource causing the vulnerability such as - JavaScript sources, image, audio files, etc. 
+ An addon containing detailed information regarding any + resource causing the vulnerability such as JavaScript sources, + image, audio files, etc. vulnerable_headers: - Output only. An addon containing information about vulnerable - or missing HTTP headers. + An addon containing information about vulnerable or missing + HTTP headers. vulnerable_parameters: - Output only. An addon containing information about request - parameters which were found to be vulnerable. + An addon containing information about request parameters which + were found to be vulnerable. xss: - Output only. An addon containing information reported for an - XSS, if any. + An addon containing information reported for an XSS, if any. """, # @@protoc_insertion_point(class_scope:google.cloud.websecurityscanner.v1beta.Finding) ), @@ -450,4 +449,5 @@ DESCRIPTOR._options = None +_FINDING._options = None # @@protoc_insertion_point(module_scope) diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/finding_type_stats.proto b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/finding_type_stats.proto index cbace23d3433..97f4882d1540 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/finding_type_stats.proto +++ b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/finding_type_stats.proto @@ -17,8 +17,6 @@ syntax = "proto3"; package google.cloud.websecurityscanner.v1beta; -import "google/api/annotations.proto"; - option go_package = "google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta;websecurityscanner"; option java_multiple_files = true; option java_outer_classname = "FindingTypeStatsProto"; @@ -28,11 +26,9 @@ option php_namespace = "Google\\Cloud\\WebSecurityScanner\\V1beta"; // A FindingTypeStats resource represents stats regarding a specific FindingType // of Findings under a given ScanRun. message FindingTypeStats { - // Output only. // The finding type associated with the stats. 
string finding_type = 1; - // Output only. // The count of findings belonging to this finding type. int32 finding_count = 2; } diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/finding_type_stats_pb2.py b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/finding_type_stats_pb2.py index 3fa52a7f3321..2c522f67dcf3 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/finding_type_stats_pb2.py +++ b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/finding_type_stats_pb2.py @@ -15,9 +15,6 @@ _sym_db = _symbol_database.Default() -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 - - DESCRIPTOR = _descriptor.FileDescriptor( name="google/cloud/websecurityscanner_v1beta/proto/finding_type_stats.proto", package="google.cloud.websecurityscanner.v1beta", @@ -26,9 +23,8 @@ "\n*com.google.cloud.websecurityscanner.v1betaB\025FindingTypeStatsProtoP\001ZXgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta;websecurityscanner\312\002&Google\\Cloud\\WebSecurityScanner\\V1beta" ), serialized_pb=_b( - '\nEgoogle/cloud/websecurityscanner_v1beta/proto/finding_type_stats.proto\x12&google.cloud.websecurityscanner.v1beta\x1a\x1cgoogle/api/annotations.proto"?\n\x10\x46indingTypeStats\x12\x14\n\x0c\x66inding_type\x18\x01 \x01(\t\x12\x15\n\rfinding_count\x18\x02 \x01(\x05\x42\xc8\x01\n*com.google.cloud.websecurityscanner.v1betaB\x15\x46indingTypeStatsProtoP\x01ZXgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta;websecurityscanner\xca\x02&Google\\Cloud\\WebSecurityScanner\\V1betab\x06proto3' + '\nEgoogle/cloud/websecurityscanner_v1beta/proto/finding_type_stats.proto\x12&google.cloud.websecurityscanner.v1beta"?\n\x10\x46indingTypeStats\x12\x14\n\x0c\x66inding_type\x18\x01 \x01(\t\x12\x15\n\rfinding_count\x18\x02 
\x01(\x05\x42\xc8\x01\n*com.google.cloud.websecurityscanner.v1betaB\x15\x46indingTypeStatsProtoP\x01ZXgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta;websecurityscanner\xca\x02&Google\\Cloud\\WebSecurityScanner\\V1betab\x06proto3' ), - dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR], ) @@ -84,8 +80,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=143, - serialized_end=206, + serialized_start=113, + serialized_end=176, ) DESCRIPTOR.message_types_by_name["FindingTypeStats"] = _FINDINGTYPESTATS @@ -103,10 +99,9 @@ Attributes: finding_type: - Output only. The finding type associated with the stats. + The finding type associated with the stats. finding_count: - Output only. The count of findings belonging to this finding - type. + The count of findings belonging to this finding type. """, # @@protoc_insertion_point(class_scope:google.cloud.websecurityscanner.v1beta.FindingTypeStats) ), diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_config.proto b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_config.proto index d698bb4f886e..c2b7dcb31727 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_config.proto +++ b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_config.proto @@ -17,7 +17,8 @@ syntax = "proto3"; package google.cloud.websecurityscanner.v1beta; -import "google/api/annotations.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; import "google/cloud/websecurityscanner/v1beta/scan_run.proto"; import "google/protobuf/timestamp.proto"; @@ -29,36 +30,40 @@ option php_namespace = "Google\\Cloud\\WebSecurityScanner\\V1beta"; // A ScanConfig resource contains the configurations to launch a scan. 
message ScanConfig { + option (google.api.resource) = { + type: "websecurityscanner.googleapis.com/ScanConfig" + pattern: "projects/{project}/scanConfigs/{scan_config}" + }; + // Scan authentication configuration. message Authentication { // Describes authentication configuration that uses a Google account. message GoogleAccount { - // Required. - // The user name of the Google account. - string username = 1; + // Required. The user name of the Google account. + string username = 1 [(google.api.field_behavior) = REQUIRED]; - // Input only. - // Required. - // The password of the Google account. The credential is stored encrypted + // Required. Input only. The password of the Google account. The credential is stored encrypted // and not returned in any response nor included in audit logs. - string password = 2; + string password = 2 [ + (google.api.field_behavior) = REQUIRED, + (google.api.field_behavior) = INPUT_ONLY + ]; } // Describes authentication configuration that uses a custom account. message CustomAccount { - // Required. - // The user name of the custom account. - string username = 1; + // Required. The user name of the custom account. + string username = 1 [(google.api.field_behavior) = REQUIRED]; - // Input only. - // Required. - // The password of the custom account. The credential is stored encrypted + // Required. Input only. The password of the custom account. The credential is stored encrypted // and not returned in any response nor included in audit logs. - string password = 2; + string password = 2 [ + (google.api.field_behavior) = REQUIRED, + (google.api.field_behavior) = INPUT_ONLY + ]; - // Required. - // The login form URL of the website. - string login_url = 3; + // Required. The login form URL of the website. + string login_url = 3 [(google.api.field_behavior) = REQUIRED]; } // Required. @@ -80,9 +85,8 @@ message ScanConfig { // immediately. google.protobuf.Timestamp schedule_time = 1; - // Required. 
- // The duration of time between executions in days. - int32 interval_duration_days = 2; + // Required. The duration of time between executions in days. + int32 interval_duration_days = 2 [(google.api.field_behavior) = REQUIRED]; } // Type of user agents used for scanning. @@ -145,9 +149,8 @@ message ScanConfig { // generated by the system. string name = 1; - // Required. - // The user provided display name of the ScanConfig. - string display_name = 2; + // Required. The user provided display name of the ScanConfig. + string display_name = 2 [(google.api.field_behavior) = REQUIRED]; // The maximum QPS during scanning. A valid value ranges from 5 to 20 // inclusively. If the field is unspecified or its value is set 0, server will @@ -155,9 +158,8 @@ message ScanConfig { // INVALID_ARGUMENT error. int32 max_qps = 3; - // Required. - // The starting URLs from which the scanner finds site pages. - repeated string starting_urls = 4; + // Required. The starting URLs from which the scanner finds site pages. + repeated string starting_urls = 4 [(google.api.field_behavior) = REQUIRED]; // The authentication configuration. If specified, service will use the // authentication configuration during scanning. diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_config_error.proto b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_config_error.proto index c55c744c6272..a50bdcaf37f6 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_config_error.proto +++ b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_config_error.proto @@ -30,7 +30,7 @@ option php_namespace = "Google\\Cloud\\WebSecurityScanner\\V1beta"; message ScanConfigError { // Output only. // Defines an error reason code. - // Next id: 43 + // Next id: 44 enum Code { option allow_alias = true; @@ -106,6 +106,9 @@ message ScanConfigError { // Google Compute Engine Scanning Alpha access. 
FORBIDDEN_TO_SCAN_COMPUTE = 21; + // User tries to update managed scan + FORBIDDEN_UPDATE_TO_MANAGED_SCAN = 43; + // The supplied filter is malformed. For example, it can not be parsed, does // not have a filter type in expression, or the same filter type appears // more than once. @@ -174,11 +177,9 @@ message ScanConfigError { UNSUPPORTED_URL_SCHEME = 42; } - // Output only. // Indicates the reason code for a configuration failure. Code code = 1; - // Output only. // Indicates the full name of the ScanConfig field that triggers this error, // for example "scan_config.max_qps". This field is provided for // troubleshooting purposes only and its actual value can change in the diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_config_error_pb2.py b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_config_error_pb2.py index 03dfa6cae009..4d907be487b3 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_config_error_pb2.py +++ b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_config_error_pb2.py @@ -23,7 +23,7 @@ "\n*com.google.cloud.websecurityscanner.v1betaB\024ScanConfigErrorProtoP\001ZXgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta;websecurityscanner\312\002&Google\\Cloud\\WebSecurityScanner\\V1beta" ), serialized_pb=_b( - '\nDgoogle/cloud/websecurityscanner_v1beta/proto/scan_config_error.proto\x12&google.cloud.websecurityscanner.v1beta"\xc7\x0b\n\x0fScanConfigError\x12J\n\x04\x63ode\x18\x01 \x01(\x0e\x32<.google.cloud.websecurityscanner.v1beta.ScanConfigError.Code\x12\x12\n\nfield_name\x18\x02 \x01(\t"\xd3\n\n\x04\x43ode\x12\x14\n\x10\x43ODE_UNSPECIFIED\x10\x00\x12\x06\n\x02OK\x10\x00\x12\x12\n\x0eINTERNAL_ERROR\x10\x01\x12\x1f\n\x1b\x41PPENGINE_API_BACKEND_ERROR\x10\x02\x12 
\n\x1c\x41PPENGINE_API_NOT_ACCESSIBLE\x10\x03\x12"\n\x1e\x41PPENGINE_DEFAULT_HOST_MISSING\x10\x04\x12!\n\x1d\x43\x41NNOT_USE_GOOGLE_COM_ACCOUNT\x10\x06\x12\x1c\n\x18\x43\x41NNOT_USE_OWNER_ACCOUNT\x10\x07\x12\x1d\n\x19\x43OMPUTE_API_BACKEND_ERROR\x10\x08\x12\x1e\n\x1a\x43OMPUTE_API_NOT_ACCESSIBLE\x10\t\x12\x37\n3CUSTOM_LOGIN_URL_DOES_NOT_BELONG_TO_CURRENT_PROJECT\x10\n\x12\x1e\n\x1a\x43USTOM_LOGIN_URL_MALFORMED\x10\x0b\x12\x33\n/CUSTOM_LOGIN_URL_MAPPED_TO_NON_ROUTABLE_ADDRESS\x10\x0c\x12\x31\n-CUSTOM_LOGIN_URL_MAPPED_TO_UNRESERVED_ADDRESS\x10\r\x12\x30\n,CUSTOM_LOGIN_URL_HAS_NON_ROUTABLE_IP_ADDRESS\x10\x0e\x12.\n*CUSTOM_LOGIN_URL_HAS_UNRESERVED_IP_ADDRESS\x10\x0f\x12\x17\n\x13\x44UPLICATE_SCAN_NAME\x10\x10\x12\x17\n\x13INVALID_FIELD_VALUE\x10\x12\x12$\n FAILED_TO_AUTHENTICATE_TO_TARGET\x10\x13\x12\x1c\n\x18\x46INDING_TYPE_UNSPECIFIED\x10\x14\x12\x1d\n\x19\x46ORBIDDEN_TO_SCAN_COMPUTE\x10\x15\x12\x14\n\x10MALFORMED_FILTER\x10\x16\x12\x1b\n\x17MALFORMED_RESOURCE_NAME\x10\x17\x12\x14\n\x10PROJECT_INACTIVE\x10\x18\x12\x12\n\x0eREQUIRED_FIELD\x10\x19\x12\x1e\n\x1aRESOURCE_NAME_INCONSISTENT\x10\x1a\x12\x18\n\x14SCAN_ALREADY_RUNNING\x10\x1b\x12\x14\n\x10SCAN_NOT_RUNNING\x10\x1c\x12/\n+SEED_URL_DOES_NOT_BELONG_TO_CURRENT_PROJECT\x10\x1d\x12\x16\n\x12SEED_URL_MALFORMED\x10\x1e\x12+\n\'SEED_URL_MAPPED_TO_NON_ROUTABLE_ADDRESS\x10\x1f\x12)\n%SEED_URL_MAPPED_TO_UNRESERVED_ADDRESS\x10 
\x12(\n$SEED_URL_HAS_NON_ROUTABLE_IP_ADDRESS\x10!\x12&\n"SEED_URL_HAS_UNRESERVED_IP_ADDRESS\x10#\x12"\n\x1eSERVICE_ACCOUNT_NOT_CONFIGURED\x10$\x12\x12\n\x0eTOO_MANY_SCANS\x10%\x12"\n\x1eUNABLE_TO_RESOLVE_PROJECT_INFO\x10&\x12(\n$UNSUPPORTED_BLACKLIST_PATTERN_FORMAT\x10\'\x12\x16\n\x12UNSUPPORTED_FILTER\x10(\x12\x1c\n\x18UNSUPPORTED_FINDING_TYPE\x10)\x12\x1a\n\x16UNSUPPORTED_URL_SCHEME\x10*\x1a\x02\x10\x01\x42\xc7\x01\n*com.google.cloud.websecurityscanner.v1betaB\x14ScanConfigErrorProtoP\x01ZXgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta;websecurityscanner\xca\x02&Google\\Cloud\\WebSecurityScanner\\V1betab\x06proto3' + '\nDgoogle/cloud/websecurityscanner_v1beta/proto/scan_config_error.proto\x12&google.cloud.websecurityscanner.v1beta"\xed\x0b\n\x0fScanConfigError\x12J\n\x04\x63ode\x18\x01 \x01(\x0e\x32<.google.cloud.websecurityscanner.v1beta.ScanConfigError.Code\x12\x12\n\nfield_name\x18\x02 \x01(\t"\xf9\n\n\x04\x43ode\x12\x14\n\x10\x43ODE_UNSPECIFIED\x10\x00\x12\x06\n\x02OK\x10\x00\x12\x12\n\x0eINTERNAL_ERROR\x10\x01\x12\x1f\n\x1b\x41PPENGINE_API_BACKEND_ERROR\x10\x02\x12 \n\x1c\x41PPENGINE_API_NOT_ACCESSIBLE\x10\x03\x12"\n\x1e\x41PPENGINE_DEFAULT_HOST_MISSING\x10\x04\x12!\n\x1d\x43\x41NNOT_USE_GOOGLE_COM_ACCOUNT\x10\x06\x12\x1c\n\x18\x43\x41NNOT_USE_OWNER_ACCOUNT\x10\x07\x12\x1d\n\x19\x43OMPUTE_API_BACKEND_ERROR\x10\x08\x12\x1e\n\x1a\x43OMPUTE_API_NOT_ACCESSIBLE\x10\t\x12\x37\n3CUSTOM_LOGIN_URL_DOES_NOT_BELONG_TO_CURRENT_PROJECT\x10\n\x12\x1e\n\x1a\x43USTOM_LOGIN_URL_MALFORMED\x10\x0b\x12\x33\n/CUSTOM_LOGIN_URL_MAPPED_TO_NON_ROUTABLE_ADDRESS\x10\x0c\x12\x31\n-CUSTOM_LOGIN_URL_MAPPED_TO_UNRESERVED_ADDRESS\x10\r\x12\x30\n,CUSTOM_LOGIN_URL_HAS_NON_ROUTABLE_IP_ADDRESS\x10\x0e\x12.\n*CUSTOM_LOGIN_URL_HAS_UNRESERVED_IP_ADDRESS\x10\x0f\x12\x17\n\x13\x44UPLICATE_SCAN_NAME\x10\x10\x12\x17\n\x13INVALID_FIELD_VALUE\x10\x12\x12$\n 
FAILED_TO_AUTHENTICATE_TO_TARGET\x10\x13\x12\x1c\n\x18\x46INDING_TYPE_UNSPECIFIED\x10\x14\x12\x1d\n\x19\x46ORBIDDEN_TO_SCAN_COMPUTE\x10\x15\x12$\n FORBIDDEN_UPDATE_TO_MANAGED_SCAN\x10+\x12\x14\n\x10MALFORMED_FILTER\x10\x16\x12\x1b\n\x17MALFORMED_RESOURCE_NAME\x10\x17\x12\x14\n\x10PROJECT_INACTIVE\x10\x18\x12\x12\n\x0eREQUIRED_FIELD\x10\x19\x12\x1e\n\x1aRESOURCE_NAME_INCONSISTENT\x10\x1a\x12\x18\n\x14SCAN_ALREADY_RUNNING\x10\x1b\x12\x14\n\x10SCAN_NOT_RUNNING\x10\x1c\x12/\n+SEED_URL_DOES_NOT_BELONG_TO_CURRENT_PROJECT\x10\x1d\x12\x16\n\x12SEED_URL_MALFORMED\x10\x1e\x12+\n\'SEED_URL_MAPPED_TO_NON_ROUTABLE_ADDRESS\x10\x1f\x12)\n%SEED_URL_MAPPED_TO_UNRESERVED_ADDRESS\x10 \x12(\n$SEED_URL_HAS_NON_ROUTABLE_IP_ADDRESS\x10!\x12&\n"SEED_URL_HAS_UNRESERVED_IP_ADDRESS\x10#\x12"\n\x1eSERVICE_ACCOUNT_NOT_CONFIGURED\x10$\x12\x12\n\x0eTOO_MANY_SCANS\x10%\x12"\n\x1eUNABLE_TO_RESOLVE_PROJECT_INFO\x10&\x12(\n$UNSUPPORTED_BLACKLIST_PATTERN_FORMAT\x10\'\x12\x16\n\x12UNSUPPORTED_FILTER\x10(\x12\x1c\n\x18UNSUPPORTED_FINDING_TYPE\x10)\x12\x1a\n\x16UNSUPPORTED_URL_SCHEME\x10*\x1a\x02\x10\x01\x42\xc7\x01\n*com.google.cloud.websecurityscanner.v1betaB\x14ScanConfigErrorProtoP\x01ZXgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta;websecurityscanner\xca\x02&Google\\Cloud\\WebSecurityScanner\\V1betab\x06proto3' ), ) @@ -174,141 +174,148 @@ type=None, ), _descriptor.EnumValueDescriptor( - name="MALFORMED_FILTER", + name="FORBIDDEN_UPDATE_TO_MANAGED_SCAN", index=21, + number=43, + serialized_options=None, + type=None, + ), + _descriptor.EnumValueDescriptor( + name="MALFORMED_FILTER", + index=22, number=22, serialized_options=None, type=None, ), _descriptor.EnumValueDescriptor( name="MALFORMED_RESOURCE_NAME", - index=22, + index=23, number=23, serialized_options=None, type=None, ), _descriptor.EnumValueDescriptor( name="PROJECT_INACTIVE", - index=23, + index=24, number=24, serialized_options=None, type=None, ), _descriptor.EnumValueDescriptor( name="REQUIRED_FIELD", - index=24, 
+ index=25, number=25, serialized_options=None, type=None, ), _descriptor.EnumValueDescriptor( name="RESOURCE_NAME_INCONSISTENT", - index=25, + index=26, number=26, serialized_options=None, type=None, ), _descriptor.EnumValueDescriptor( name="SCAN_ALREADY_RUNNING", - index=26, + index=27, number=27, serialized_options=None, type=None, ), _descriptor.EnumValueDescriptor( name="SCAN_NOT_RUNNING", - index=27, + index=28, number=28, serialized_options=None, type=None, ), _descriptor.EnumValueDescriptor( name="SEED_URL_DOES_NOT_BELONG_TO_CURRENT_PROJECT", - index=28, + index=29, number=29, serialized_options=None, type=None, ), _descriptor.EnumValueDescriptor( name="SEED_URL_MALFORMED", - index=29, + index=30, number=30, serialized_options=None, type=None, ), _descriptor.EnumValueDescriptor( name="SEED_URL_MAPPED_TO_NON_ROUTABLE_ADDRESS", - index=30, + index=31, number=31, serialized_options=None, type=None, ), _descriptor.EnumValueDescriptor( name="SEED_URL_MAPPED_TO_UNRESERVED_ADDRESS", - index=31, + index=32, number=32, serialized_options=None, type=None, ), _descriptor.EnumValueDescriptor( name="SEED_URL_HAS_NON_ROUTABLE_IP_ADDRESS", - index=32, + index=33, number=33, serialized_options=None, type=None, ), _descriptor.EnumValueDescriptor( name="SEED_URL_HAS_UNRESERVED_IP_ADDRESS", - index=33, + index=34, number=35, serialized_options=None, type=None, ), _descriptor.EnumValueDescriptor( name="SERVICE_ACCOUNT_NOT_CONFIGURED", - index=34, + index=35, number=36, serialized_options=None, type=None, ), _descriptor.EnumValueDescriptor( name="TOO_MANY_SCANS", - index=35, + index=36, number=37, serialized_options=None, type=None, ), _descriptor.EnumValueDescriptor( name="UNABLE_TO_RESOLVE_PROJECT_INFO", - index=36, + index=37, number=38, serialized_options=None, type=None, ), _descriptor.EnumValueDescriptor( name="UNSUPPORTED_BLACKLIST_PATTERN_FORMAT", - index=37, + index=38, number=39, serialized_options=None, type=None, ), _descriptor.EnumValueDescriptor( 
name="UNSUPPORTED_FILTER", - index=38, + index=39, number=40, serialized_options=None, type=None, ), _descriptor.EnumValueDescriptor( name="UNSUPPORTED_FINDING_TYPE", - index=39, + index=40, number=41, serialized_options=None, type=None, ), _descriptor.EnumValueDescriptor( name="UNSUPPORTED_URL_SCHEME", - index=40, + index=41, number=42, serialized_options=None, type=None, @@ -317,7 +324,7 @@ containing_type=None, serialized_options=_b("\020\001"), serialized_start=229, - serialized_end=1592, + serialized_end=1630, ) _sym_db.RegisterEnumDescriptor(_SCANCONFIGERROR_CODE) @@ -375,7 +382,7 @@ extension_ranges=[], oneofs=[], serialized_start=113, - serialized_end=1592, + serialized_end=1630, ) _SCANCONFIGERROR.fields_by_name["code"].enum_type = _SCANCONFIGERROR_CODE @@ -397,13 +404,12 @@ Attributes: code: - Output only. Indicates the reason code for a configuration - failure. + Indicates the reason code for a configuration failure. field_name: - Output only. Indicates the full name of the ScanConfig field - that triggers this error, for example "scan\_config.max\_qps". - This field is provided for troubleshooting purposes only and - its actual value can change in the future. + Indicates the full name of the ScanConfig field that triggers + this error, for example "scan\_config.max\_qps". This field is + provided for troubleshooting purposes only and its actual + value can change in the future. 
""", # @@protoc_insertion_point(class_scope:google.cloud.websecurityscanner.v1beta.ScanConfigError) ), diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_config_pb2.py b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_config_pb2.py index 79752f0de426..c7290be00774 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_config_pb2.py +++ b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_config_pb2.py @@ -15,7 +15,8 @@ _sym_db = _symbol_database.Default() -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.cloud.websecurityscanner_v1beta.proto import ( scan_run_pb2 as google_dot_cloud_dot_websecurityscanner__v1beta_dot_proto_dot_scan__run__pb2, ) @@ -30,10 +31,11 @@ "\n*com.google.cloud.websecurityscanner.v1betaB\017ScanConfigProtoP\001ZXgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta;websecurityscanner\312\002&Google\\Cloud\\WebSecurityScanner\\V1beta" ), serialized_pb=_b( - '\n>google/cloud/websecurityscanner_v1beta/proto/scan_config.proto\x12&google.cloud.websecurityscanner.v1beta\x1a\x1cgoogle/api/annotations.proto\x1a;google/cloud/websecurityscanner_v1beta/proto/scan_run.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\x97\x0c\n\nScanConfig\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12\x0f\n\x07max_qps\x18\x03 \x01(\x05\x12\x15\n\rstarting_urls\x18\x04 \x03(\t\x12Y\n\x0e\x61uthentication\x18\x05 \x01(\x0b\x32\x41.google.cloud.websecurityscanner.v1beta.ScanConfig.Authentication\x12P\n\nuser_agent\x18\x06 \x01(\x0e\x32<.google.cloud.websecurityscanner.v1beta.ScanConfig.UserAgent\x12\x1a\n\x12\x62lacklist_patterns\x18\x07 \x03(\t\x12M\n\x08schedule\x18\x08 
\x01(\x0b\x32;.google.cloud.websecurityscanner.v1beta.ScanConfig.Schedule\x12[\n\x10target_platforms\x18\t \x03(\x0e\x32\x41.google.cloud.websecurityscanner.v1beta.ScanConfig.TargetPlatform\x12{\n!export_to_security_command_center\x18\n \x01(\x0e\x32P.google.cloud.websecurityscanner.v1beta.ScanConfig.ExportToSecurityCommandCenter\x12\x43\n\nlatest_run\x18\x0b \x01(\x0b\x32/.google.cloud.websecurityscanner.v1beta.ScanRun\x12P\n\nrisk_level\x18\x0c \x01(\x0e\x32<.google.cloud.websecurityscanner.v1beta.ScanConfig.RiskLevel\x1a\xf5\x02\n\x0e\x41uthentication\x12i\n\x0egoogle_account\x18\x01 \x01(\x0b\x32O.google.cloud.websecurityscanner.v1beta.ScanConfig.Authentication.GoogleAccountH\x00\x12i\n\x0e\x63ustom_account\x18\x02 \x01(\x0b\x32O.google.cloud.websecurityscanner.v1beta.ScanConfig.Authentication.CustomAccountH\x00\x1a\x33\n\rGoogleAccount\x12\x10\n\x08username\x18\x01 \x01(\t\x12\x10\n\x08password\x18\x02 \x01(\t\x1a\x46\n\rCustomAccount\x12\x10\n\x08username\x18\x01 \x01(\t\x12\x10\n\x08password\x18\x02 \x01(\t\x12\x11\n\tlogin_url\x18\x03 \x01(\tB\x10\n\x0e\x61uthentication\x1a]\n\x08Schedule\x12\x31\n\rschedule_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x1e\n\x16interval_duration_days\x18\x02 
\x01(\x05"`\n\tUserAgent\x12\x1a\n\x16USER_AGENT_UNSPECIFIED\x10\x00\x12\x10\n\x0c\x43HROME_LINUX\x10\x01\x12\x12\n\x0e\x43HROME_ANDROID\x10\x02\x12\x11\n\rSAFARI_IPHONE\x10\x03"N\n\x0eTargetPlatform\x12\x1f\n\x1bTARGET_PLATFORM_UNSPECIFIED\x10\x00\x12\x0e\n\nAPP_ENGINE\x10\x01\x12\x0b\n\x07\x43OMPUTE\x10\x02"<\n\tRiskLevel\x12\x1a\n\x16RISK_LEVEL_UNSPECIFIED\x10\x00\x12\n\n\x06NORMAL\x10\x01\x12\x07\n\x03LOW\x10\x02"m\n\x1d\x45xportToSecurityCommandCenter\x12\x31\n-EXPORT_TO_SECURITY_COMMAND_CENTER_UNSPECIFIED\x10\x00\x12\x0b\n\x07\x45NABLED\x10\x01\x12\x0c\n\x08\x44ISABLED\x10\x02\x42\xc2\x01\n*com.google.cloud.websecurityscanner.v1betaB\x0fScanConfigProtoP\x01ZXgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta;websecurityscanner\xca\x02&Google\\Cloud\\WebSecurityScanner\\V1betab\x06proto3' + '\n>google/cloud/websecurityscanner_v1beta/proto/scan_config.proto\x12&google.cloud.websecurityscanner.v1beta\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a;google/cloud/websecurityscanner_v1beta/proto/scan_run.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xa6\r\n\nScanConfig\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x19\n\x0c\x64isplay_name\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x0f\n\x07max_qps\x18\x03 \x01(\x05\x12\x1a\n\rstarting_urls\x18\x04 \x03(\tB\x03\xe0\x41\x02\x12Y\n\x0e\x61uthentication\x18\x05 \x01(\x0b\x32\x41.google.cloud.websecurityscanner.v1beta.ScanConfig.Authentication\x12P\n\nuser_agent\x18\x06 \x01(\x0e\x32<.google.cloud.websecurityscanner.v1beta.ScanConfig.UserAgent\x12\x1a\n\x12\x62lacklist_patterns\x18\x07 \x03(\t\x12M\n\x08schedule\x18\x08 \x01(\x0b\x32;.google.cloud.websecurityscanner.v1beta.ScanConfig.Schedule\x12[\n\x10target_platforms\x18\t \x03(\x0e\x32\x41.google.cloud.websecurityscanner.v1beta.ScanConfig.TargetPlatform\x12{\n!export_to_security_command_center\x18\n \x01(\x0e\x32P.google.cloud.websecurityscanner.v1beta.ScanConfig.ExportToSecurityCommandCenter\x12\x43\n\nlatest_run\x18\x0b 
\x01(\x0b\x32/.google.cloud.websecurityscanner.v1beta.ScanRun\x12P\n\nrisk_level\x18\x0c \x01(\x0e\x32<.google.cloud.websecurityscanner.v1beta.ScanConfig.RiskLevel\x1a\x94\x03\n\x0e\x41uthentication\x12i\n\x0egoogle_account\x18\x01 \x01(\x0b\x32O.google.cloud.websecurityscanner.v1beta.ScanConfig.Authentication.GoogleAccountH\x00\x12i\n\x0e\x63ustom_account\x18\x02 \x01(\x0b\x32O.google.cloud.websecurityscanner.v1beta.ScanConfig.Authentication.CustomAccountH\x00\x1a@\n\rGoogleAccount\x12\x15\n\x08username\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x18\n\x08password\x18\x02 \x01(\tB\x06\xe0\x41\x02\xe0\x41\x04\x1aX\n\rCustomAccount\x12\x15\n\x08username\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x18\n\x08password\x18\x02 \x01(\tB\x06\xe0\x41\x02\xe0\x41\x04\x12\x16\n\tlogin_url\x18\x03 \x01(\tB\x03\xe0\x41\x02\x42\x10\n\x0e\x61uthentication\x1a\x62\n\x08Schedule\x12\x31\n\rschedule_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12#\n\x16interval_duration_days\x18\x02 \x01(\x05\x42\x03\xe0\x41\x02"`\n\tUserAgent\x12\x1a\n\x16USER_AGENT_UNSPECIFIED\x10\x00\x12\x10\n\x0c\x43HROME_LINUX\x10\x01\x12\x12\n\x0e\x43HROME_ANDROID\x10\x02\x12\x11\n\rSAFARI_IPHONE\x10\x03"N\n\x0eTargetPlatform\x12\x1f\n\x1bTARGET_PLATFORM_UNSPECIFIED\x10\x00\x12\x0e\n\nAPP_ENGINE\x10\x01\x12\x0b\n\x07\x43OMPUTE\x10\x02"<\n\tRiskLevel\x12\x1a\n\x16RISK_LEVEL_UNSPECIFIED\x10\x00\x12\n\n\x06NORMAL\x10\x01\x12\x07\n\x03LOW\x10\x02"m\n\x1d\x45xportToSecurityCommandCenter\x12\x31\n-EXPORT_TO_SECURITY_COMMAND_CENTER_UNSPECIFIED\x10\x00\x12\x0b\n\x07\x45NABLED\x10\x01\x12\x0c\n\x08\x44ISABLED\x10\x02:_\xea\x41\\\n,websecurityscanner.googleapis.com/ScanConfig\x12,projects/{project}/scanConfigs/{scan_config}B\xc2\x01\n*com.google.cloud.websecurityscanner.v1betaB\x0fScanConfigProtoP\x01ZXgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta;websecurityscanner\xca\x02&Google\\Cloud\\WebSecurityScanner\\V1betab\x06proto3' ), dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, + 
google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, + google_dot_api_dot_resource__pb2.DESCRIPTOR, google_dot_cloud_dot_websecurityscanner__v1beta_dot_proto_dot_scan__run__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, ], @@ -65,8 +67,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=1441, - serialized_end=1537, + serialized_start=1517, + serialized_end=1613, ) _sym_db.RegisterEnumDescriptor(_SCANCONFIG_USERAGENT) @@ -92,8 +94,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=1539, - serialized_end=1617, + serialized_start=1615, + serialized_end=1693, ) _sym_db.RegisterEnumDescriptor(_SCANCONFIG_TARGETPLATFORM) @@ -119,8 +121,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=1619, - serialized_end=1679, + serialized_start=1695, + serialized_end=1755, ) _sym_db.RegisterEnumDescriptor(_SCANCONFIG_RISKLEVEL) @@ -146,8 +148,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=1681, - serialized_end=1790, + serialized_start=1757, + serialized_end=1866, ) _sym_db.RegisterEnumDescriptor(_SCANCONFIG_EXPORTTOSECURITYCOMMANDCENTER) @@ -174,7 +176,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -192,7 +194,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002\340A\004"), file=DESCRIPTOR, ), ], @@ -204,8 +206,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1203, - serialized_end=1254, + serialized_start=1243, + serialized_end=1307, ) _SCANCONFIG_AUTHENTICATION_CUSTOMACCOUNT = _descriptor.Descriptor( @@ -230,7 +232,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -248,7 +250,7 @@ 
containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002\340A\004"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -266,7 +268,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -278,8 +280,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1256, - serialized_end=1326, + serialized_start=1309, + serialized_end=1397, ) _SCANCONFIG_AUTHENTICATION = _descriptor.Descriptor( @@ -345,8 +347,8 @@ fields=[], ) ], - serialized_start=971, - serialized_end=1344, + serialized_start=1011, + serialized_end=1415, ) _SCANCONFIG_SCHEDULE = _descriptor.Descriptor( @@ -389,7 +391,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -401,8 +403,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1346, - serialized_end=1439, + serialized_start=1417, + serialized_end=1515, ) _SCANCONFIG = _descriptor.Descriptor( @@ -445,7 +447,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -481,7 +483,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -637,13 +639,15 @@ _SCANCONFIG_RISKLEVEL, _SCANCONFIG_EXPORTTOSECURITYCOMMANDCENTER, ], - serialized_options=None, + serialized_options=_b( + "\352A\\\n,websecurityscanner.googleapis.com/ScanConfig\022,projects/{project}/scanConfigs/{scan_config}" + ), is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=231, - serialized_end=1790, + serialized_start=261, + serialized_end=1963, ) 
_SCANCONFIG_AUTHENTICATION_GOOGLEACCOUNT.containing_type = _SCANCONFIG_AUTHENTICATION @@ -712,7 +716,7 @@ username: Required. The user name of the Google account. password: - Input only. Required. The password of the Google account. The + Required. Input only. The password of the Google account. The credential is stored encrypted and not returned in any response nor included in audit logs. """, @@ -732,7 +736,7 @@ username: Required. The user name of the custom account. password: - Input only. Required. The password of the custom account. The + Required. Input only. The password of the custom account. The credential is stored encrypted and not returned in any response nor included in audit logs. login_url: @@ -830,4 +834,13 @@ DESCRIPTOR._options = None +_SCANCONFIG_AUTHENTICATION_GOOGLEACCOUNT.fields_by_name["username"]._options = None +_SCANCONFIG_AUTHENTICATION_GOOGLEACCOUNT.fields_by_name["password"]._options = None +_SCANCONFIG_AUTHENTICATION_CUSTOMACCOUNT.fields_by_name["username"]._options = None +_SCANCONFIG_AUTHENTICATION_CUSTOMACCOUNT.fields_by_name["password"]._options = None +_SCANCONFIG_AUTHENTICATION_CUSTOMACCOUNT.fields_by_name["login_url"]._options = None +_SCANCONFIG_SCHEDULE.fields_by_name["interval_duration_days"]._options = None +_SCANCONFIG.fields_by_name["display_name"]._options = None +_SCANCONFIG.fields_by_name["starting_urls"]._options = None +_SCANCONFIG._options = None # @@protoc_insertion_point(module_scope) diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_run.proto b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_run.proto index 497edc39bd1b..84564c6e2f50 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_run.proto +++ b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_run.proto @@ -17,7 +17,7 @@ syntax = "proto3"; package google.cloud.websecurityscanner.v1beta; -import "google/api/annotations.proto"; +import 
"google/api/resource.proto"; import "google/cloud/websecurityscanner/v1beta/scan_run_error_trace.proto"; import "google/cloud/websecurityscanner/v1beta/scan_run_warning_trace.proto"; import "google/protobuf/timestamp.proto"; @@ -31,6 +31,11 @@ option php_namespace = "Google\\Cloud\\WebSecurityScanner\\V1beta"; // A ScanRun is a output-only resource representing an actual run of the scan. // Next id: 12 message ScanRun { + option (google.api.resource) = { + type: "websecurityscanner.googleapis.com/ScanRun" + pattern: "projects/{project}/scanConfigs/{scan_config}/scanRuns/{scan_run}" + }; + // Types of ScanRun execution state. enum ExecutionState { // Represents an invalid state caused by internal server error. This value @@ -63,59 +68,48 @@ message ScanRun { KILLED = 3; } - // Output only. // The resource name of the ScanRun. The name follows the format of // 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}'. // The ScanRun IDs are generated by the system. string name = 1; - // Output only. // The execution state of the ScanRun. ExecutionState execution_state = 2; - // Output only. // The result state of the ScanRun. This field is only available after the // execution state reaches "FINISHED". ResultState result_state = 3; - // Output only. // The time at which the ScanRun started. google.protobuf.Timestamp start_time = 4; - // Output only. // The time at which the ScanRun reached termination state - that the ScanRun // is either finished or stopped by user. google.protobuf.Timestamp end_time = 5; - // Output only. // The number of URLs crawled during this ScanRun. If the scan is in progress, // the value represents the number of URLs crawled up to now. int64 urls_crawled_count = 6; - // Output only. // The number of URLs tested during this ScanRun. If the scan is in progress, // the value represents the number of URLs tested up to now. 
The number of // URLs tested is usually larger than the number URLS crawled because // typically a crawled URL is tested with multiple test payloads. int64 urls_tested_count = 7; - // Output only. // Whether the scan run has found any vulnerabilities. bool has_vulnerabilities = 8; - // Output only. // The percentage of total completion ranging from 0 to 100. // If the scan is in queue, the value is 0. // If the scan is running, the value ranges from 0 to 100. // If the scan is finished, the value is 100. int32 progress_percent = 9; - // Output only. // If result_state is an ERROR, this field provides the primary reason for // scan's termination and more details, if such are available. ScanRunErrorTrace error_trace = 10; - // Output only. // A list of warnings, if such are encountered during this scan run. repeated ScanRunWarningTrace warning_traces = 11; } diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_run_error_trace.proto b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_run_error_trace.proto index 3d1a391dc6da..248967d245a4 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_run_error_trace.proto +++ b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_run_error_trace.proto @@ -55,21 +55,18 @@ message ScanRunErrorTrace { // Indicates that a scan encountered numerous errors from the web site // pages. When available, most_common_http_error_code field indicates the - // the most common HTTP error code encountered during the scan. + // most common HTTP error code encountered during the scan. TOO_MANY_HTTP_ERRORS = 6; } - // Output only. // Indicates the error reason code. Code code = 1; - // Output only. // If the scan encounters SCAN_CONFIG_ISSUE error, this field has the error // message encountered during scan configuration validation that is performed // before each scan run. ScanConfigError scan_config_error = 2; - // Output only. 
// If the scan encounters TOO_MANY_HTTP_ERRORS, this field indicates the most // common HTTP error code, if such is available. For example, if this code is // 404, the scan has encountered too many NOT_FOUND responses. diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_run_error_trace_pb2.py b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_run_error_trace_pb2.py index 9d928fa70de8..b5975a795354 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_run_error_trace_pb2.py +++ b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_run_error_trace_pb2.py @@ -191,16 +191,15 @@ Attributes: code: - Output only. Indicates the error reason code. + Indicates the error reason code. scan_config_error: - Output only. If the scan encounters SCAN\_CONFIG\_ISSUE error, - this field has the error message encountered during scan - configuration validation that is performed before each scan - run. + If the scan encounters SCAN\_CONFIG\_ISSUE error, this field + has the error message encountered during scan configuration + validation that is performed before each scan run. most_common_http_error_code: - Output only. If the scan encounters TOO\_MANY\_HTTP\_ERRORS, - this field indicates the most common HTTP error code, if such - is available. For example, if this code is 404, the scan has + If the scan encounters TOO\_MANY\_HTTP\_ERRORS, this field + indicates the most common HTTP error code, if such is + available. For example, if this code is 404, the scan has encountered too many NOT\_FOUND responses. 
""", # @@protoc_insertion_point(class_scope:google.cloud.websecurityscanner.v1beta.ScanRunErrorTrace) diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_run_pb2.py b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_run_pb2.py index eb528ae4ff57..6354b6fd38e1 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_run_pb2.py +++ b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_run_pb2.py @@ -15,7 +15,7 @@ _sym_db = _symbol_database.Default() -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.cloud.websecurityscanner_v1beta.proto import ( scan_run_error_trace_pb2 as google_dot_cloud_dot_websecurityscanner__v1beta_dot_proto_dot_scan__run__error__trace__pb2, ) @@ -33,10 +33,10 @@ "\n*com.google.cloud.websecurityscanner.v1betaB\014ScanRunProtoP\001ZXgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta;websecurityscanner\312\002&Google\\Cloud\\WebSecurityScanner\\V1beta" ), serialized_pb=_b( - '\n;google/cloud/websecurityscanner_v1beta/proto/scan_run.proto\x12&google.cloud.websecurityscanner.v1beta\x1a\x1cgoogle/api/annotations.proto\x1aGgoogle/cloud/websecurityscanner_v1beta/proto/scan_run_error_trace.proto\x1aIgoogle/cloud/websecurityscanner_v1beta/proto/scan_run_warning_trace.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xe0\x05\n\x07ScanRun\x12\x0c\n\x04name\x18\x01 \x01(\t\x12W\n\x0f\x65xecution_state\x18\x02 \x01(\x0e\x32>.google.cloud.websecurityscanner.v1beta.ScanRun.ExecutionState\x12Q\n\x0cresult_state\x18\x03 \x01(\x0e\x32;.google.cloud.websecurityscanner.v1beta.ScanRun.ResultState\x12.\n\nstart_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x1a\n\x12urls_crawled_count\x18\x06 \x01(\x03\x12\x19\n\x11urls_tested_count\x18\x07 
\x01(\x03\x12\x1b\n\x13has_vulnerabilities\x18\x08 \x01(\x08\x12\x18\n\x10progress_percent\x18\t \x01(\x05\x12N\n\x0b\x65rror_trace\x18\n \x01(\x0b\x32\x39.google.cloud.websecurityscanner.v1beta.ScanRunErrorTrace\x12S\n\x0ewarning_traces\x18\x0b \x03(\x0b\x32;.google.cloud.websecurityscanner.v1beta.ScanRunWarningTrace"Y\n\x0e\x45xecutionState\x12\x1f\n\x1b\x45XECUTION_STATE_UNSPECIFIED\x10\x00\x12\n\n\x06QUEUED\x10\x01\x12\x0c\n\x08SCANNING\x10\x02\x12\x0c\n\x08\x46INISHED\x10\x03"O\n\x0bResultState\x12\x1c\n\x18RESULT_STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07SUCCESS\x10\x01\x12\t\n\x05\x45RROR\x10\x02\x12\n\n\x06KILLED\x10\x03\x42\xbf\x01\n*com.google.cloud.websecurityscanner.v1betaB\x0cScanRunProtoP\x01ZXgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta;websecurityscanner\xca\x02&Google\\Cloud\\WebSecurityScanner\\V1betab\x06proto3' + '\n;google/cloud/websecurityscanner_v1beta/proto/scan_run.proto\x12&google.cloud.websecurityscanner.v1beta\x1a\x19google/api/resource.proto\x1aGgoogle/cloud/websecurityscanner_v1beta/proto/scan_run_error_trace.proto\x1aIgoogle/cloud/websecurityscanner_v1beta/proto/scan_run_warning_trace.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xd2\x06\n\x07ScanRun\x12\x0c\n\x04name\x18\x01 \x01(\t\x12W\n\x0f\x65xecution_state\x18\x02 \x01(\x0e\x32>.google.cloud.websecurityscanner.v1beta.ScanRun.ExecutionState\x12Q\n\x0cresult_state\x18\x03 \x01(\x0e\x32;.google.cloud.websecurityscanner.v1beta.ScanRun.ResultState\x12.\n\nstart_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x1a\n\x12urls_crawled_count\x18\x06 \x01(\x03\x12\x19\n\x11urls_tested_count\x18\x07 \x01(\x03\x12\x1b\n\x13has_vulnerabilities\x18\x08 \x01(\x08\x12\x18\n\x10progress_percent\x18\t \x01(\x05\x12N\n\x0b\x65rror_trace\x18\n \x01(\x0b\x32\x39.google.cloud.websecurityscanner.v1beta.ScanRunErrorTrace\x12S\n\x0ewarning_traces\x18\x0b 
\x03(\x0b\x32;.google.cloud.websecurityscanner.v1beta.ScanRunWarningTrace"Y\n\x0e\x45xecutionState\x12\x1f\n\x1b\x45XECUTION_STATE_UNSPECIFIED\x10\x00\x12\n\n\x06QUEUED\x10\x01\x12\x0c\n\x08SCANNING\x10\x02\x12\x0c\n\x08\x46INISHED\x10\x03"O\n\x0bResultState\x12\x1c\n\x18RESULT_STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07SUCCESS\x10\x01\x12\t\n\x05\x45RROR\x10\x02\x12\n\n\x06KILLED\x10\x03:p\xea\x41m\n)websecurityscanner.googleapis.com/ScanRun\x12@projects/{project}/scanConfigs/{scan_config}/scanRuns/{scan_run}B\xbf\x01\n*com.google.cloud.websecurityscanner.v1betaB\x0cScanRunProtoP\x01ZXgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta;websecurityscanner\xca\x02&Google\\Cloud\\WebSecurityScanner\\V1betab\x06proto3' ), dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_resource__pb2.DESCRIPTOR, google_dot_cloud_dot_websecurityscanner__v1beta_dot_proto_dot_scan__run__error__trace__pb2.DESCRIPTOR, google_dot_cloud_dot_websecurityscanner__v1beta_dot_proto_dot_scan__run__warning__trace__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, @@ -69,8 +69,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=881, - serialized_end=970, + serialized_start=878, + serialized_end=967, ) _sym_db.RegisterEnumDescriptor(_SCANRUN_EXECUTIONSTATE) @@ -99,8 +99,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=972, - serialized_end=1051, + serialized_start=969, + serialized_end=1048, ) _sym_db.RegisterEnumDescriptor(_SCANRUN_RESULTSTATE) @@ -314,13 +314,15 @@ extensions=[], nested_types=[], enum_types=[_SCANRUN_EXECUTIONSTATE, _SCANRUN_RESULTSTATE], - serialized_options=None, + serialized_options=_b( + "\352Am\n)websecurityscanner.googleapis.com/ScanRun\022@projects/{project}/scanConfigs/{scan_config}/scanRuns/{scan_run}" + ), is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=315, - serialized_end=1051, + serialized_start=312, + 
serialized_end=1162, ) _SCANRUN.fields_by_name["execution_state"].enum_type = _SCANRUN_EXECUTIONSTATE @@ -358,46 +360,43 @@ Attributes: name: - Output only. The resource name of the ScanRun. The name - follows the format of 'projects/{projectId}/scanConfigs/{scanC - onfigId}/scanRuns/{scanRunId}'. The ScanRun IDs are generated - by the system. + The resource name of the ScanRun. The name follows the format + of 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{ + scanRunId}'. The ScanRun IDs are generated by the system. execution_state: - Output only. The execution state of the ScanRun. + The execution state of the ScanRun. result_state: - Output only. The result state of the ScanRun. This field is - only available after the execution state reaches "FINISHED". + The result state of the ScanRun. This field is only available + after the execution state reaches "FINISHED". start_time: - Output only. The time at which the ScanRun started. + The time at which the ScanRun started. end_time: - Output only. The time at which the ScanRun reached termination - state - that the ScanRun is either finished or stopped by - user. + The time at which the ScanRun reached termination state - that + the ScanRun is either finished or stopped by user. urls_crawled_count: - Output only. The number of URLs crawled during this ScanRun. - If the scan is in progress, the value represents the number of - URLs crawled up to now. + The number of URLs crawled during this ScanRun. If the scan is + in progress, the value represents the number of URLs crawled + up to now. urls_tested_count: - Output only. The number of URLs tested during this ScanRun. If - the scan is in progress, the value represents the number of - URLs tested up to now. The number of URLs tested is usually - larger than the number URLS crawled because typically a - crawled URL is tested with multiple test payloads. + The number of URLs tested during this ScanRun. 
If the scan is + in progress, the value represents the number of URLs tested up + to now. The number of URLs tested is usually larger than the + number URLS crawled because typically a crawled URL is tested + with multiple test payloads. has_vulnerabilities: - Output only. Whether the scan run has found any - vulnerabilities. + Whether the scan run has found any vulnerabilities. progress_percent: - Output only. The percentage of total completion ranging from 0 - to 100. If the scan is in queue, the value is 0. If the scan - is running, the value ranges from 0 to 100. If the scan is - finished, the value is 100. + The percentage of total completion ranging from 0 to 100. If + the scan is in queue, the value is 0. If the scan is running, + the value ranges from 0 to 100. If the scan is finished, the + value is 100. error_trace: - Output only. If result\_state is an ERROR, this field provides - the primary reason for scan's termination and more details, if - such are available. + If result\_state is an ERROR, this field provides the primary + reason for scan's termination and more details, if such are + available. warning_traces: - Output only. A list of warnings, if such are encountered - during this scan run. + A list of warnings, if such are encountered during this scan + run. 
""", # @@protoc_insertion_point(class_scope:google.cloud.websecurityscanner.v1beta.ScanRun) ), @@ -406,4 +405,5 @@ DESCRIPTOR._options = None +_SCANRUN._options = None # @@protoc_insertion_point(module_scope) diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_run_warning_trace.proto b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_run_warning_trace.proto index e461ad0b55b3..8207a02906a4 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_run_warning_trace.proto +++ b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_run_warning_trace.proto @@ -29,7 +29,7 @@ option php_namespace = "Google\\Cloud\\WebSecurityScanner\\V1beta"; message ScanRunWarningTrace { // Output only. // Defines a warning message code. - // Next id: 5 + // Next id: 6 enum Code { // Default value is never used. CODE_UNSPECIFIED = 0; @@ -52,7 +52,6 @@ message ScanRunWarningTrace { BLOCKED_BY_IAP = 4; } - // Output only. // Indicates the warning code. Code code = 1; } diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_run_warning_trace_pb2.py b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_run_warning_trace_pb2.py index f03a9418a590..cd94b1708679 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_run_warning_trace_pb2.py +++ b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_run_warning_trace_pb2.py @@ -130,7 +130,7 @@ Attributes: code: - Output only. Indicates the warning code. + Indicates the warning code. 
""", # @@protoc_insertion_point(class_scope:google.cloud.websecurityscanner.v1beta.ScanRunWarningTrace) ), diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/web_security_scanner.proto b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/web_security_scanner.proto index d328a2860214..9ea6207c2420 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/web_security_scanner.proto +++ b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/web_security_scanner.proto @@ -18,6 +18,9 @@ syntax = "proto3"; package google.cloud.websecurityscanner.v1beta; import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; import "google/cloud/websecurityscanner/v1beta/crawled_url.proto"; import "google/cloud/websecurityscanner/v1beta/finding.proto"; import "google/cloud/websecurityscanner/v1beta/finding_type_stats.proto"; @@ -36,12 +39,16 @@ option php_namespace = "Google\\Cloud\\WebSecurityScanner\\V1beta"; // applications hosted on Google Cloud Platform. It crawls your application, and // attempts to exercise as many user inputs and event handlers as possible. service WebSecurityScanner { + option (google.api.default_host) = "websecurityscanner.googleapis.com"; + option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; + // Creates a new ScanConfig. rpc CreateScanConfig(CreateScanConfigRequest) returns (ScanConfig) { option (google.api.http) = { post: "/v1beta/{parent=projects/*}/scanConfigs" body: "scan_config" }; + option (google.api.method_signature) = "parent,scan_config"; } // Deletes an existing ScanConfig and its child resources. @@ -49,6 +56,7 @@ service WebSecurityScanner { option (google.api.http) = { delete: "/v1beta/{name=projects/*/scanConfigs/*}" }; + option (google.api.method_signature) = "name"; } // Gets a ScanConfig. 
@@ -56,6 +64,7 @@ service WebSecurityScanner { option (google.api.http) = { get: "/v1beta/{name=projects/*/scanConfigs/*}" }; + option (google.api.method_signature) = "name"; } // Lists ScanConfigs under a given project. @@ -63,6 +72,7 @@ service WebSecurityScanner { option (google.api.http) = { get: "/v1beta/{parent=projects/*}/scanConfigs" }; + option (google.api.method_signature) = "parent"; } // Updates a ScanConfig. This method support partial update of a ScanConfig. @@ -71,6 +81,7 @@ service WebSecurityScanner { patch: "/v1beta/{scan_config.name=projects/*/scanConfigs/*}" body: "scan_config" }; + option (google.api.method_signature) = "scan_config,update_mask"; } // Start a ScanRun according to the given ScanConfig. @@ -79,6 +90,7 @@ service WebSecurityScanner { post: "/v1beta/{name=projects/*/scanConfigs/*}:start" body: "*" }; + option (google.api.method_signature) = "name"; } // Gets a ScanRun. @@ -86,6 +98,7 @@ service WebSecurityScanner { option (google.api.http) = { get: "/v1beta/{name=projects/*/scanConfigs/*/scanRuns/*}" }; + option (google.api.method_signature) = "name"; } // Lists ScanRuns under a given ScanConfig, in descending order of ScanRun @@ -94,6 +107,7 @@ service WebSecurityScanner { option (google.api.http) = { get: "/v1beta/{parent=projects/*/scanConfigs/*}/scanRuns" }; + option (google.api.method_signature) = "parent"; } // Stops a ScanRun. The stopped ScanRun is returned. @@ -102,6 +116,7 @@ service WebSecurityScanner { post: "/v1beta/{name=projects/*/scanConfigs/*/scanRuns/*}:stop" body: "*" }; + option (google.api.method_signature) = "name"; } // List CrawledUrls under a given ScanRun. @@ -109,6 +124,7 @@ service WebSecurityScanner { option (google.api.http) = { get: "/v1beta/{parent=projects/*/scanConfigs/*/scanRuns/*}/crawledUrls" }; + option (google.api.method_signature) = "parent"; } // Gets a Finding. 
@@ -116,6 +132,7 @@ service WebSecurityScanner { option (google.api.http) = { get: "/v1beta/{name=projects/*/scanConfigs/*/scanRuns/*/findings/*}" }; + option (google.api.method_signature) = "name"; } // List Findings under a given ScanRun. @@ -123,6 +140,7 @@ service WebSecurityScanner { option (google.api.http) = { get: "/v1beta/{parent=projects/*/scanConfigs/*/scanRuns/*}/findings" }; + option (google.api.method_signature) = "parent,filter"; } // List all FindingTypeStats under a given ScanRun. @@ -130,43 +148,59 @@ service WebSecurityScanner { option (google.api.http) = { get: "/v1beta/{parent=projects/*/scanConfigs/*/scanRuns/*}/findingTypeStats" }; + option (google.api.method_signature) = "parent"; } } // Request for the `CreateScanConfig` method. message CreateScanConfigRequest { - // Required. - // The parent resource name where the scan is created, which should be a + // Required. The parent resource name where the scan is created, which should be a // project resource name in the format 'projects/{projectId}'. - string parent = 1; - - // Required. - // The ScanConfig to be created. - ScanConfig scan_config = 2; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "cloudresourcemanager.googleapis.com/Project" + } + ]; + + // Required. The ScanConfig to be created. + ScanConfig scan_config = 2 [(google.api.field_behavior) = REQUIRED]; } // Request for the `DeleteScanConfig` method. message DeleteScanConfigRequest { - // Required. - // The resource name of the ScanConfig to be deleted. The name follows the + // Required. The resource name of the ScanConfig to be deleted. The name follows the // format of 'projects/{projectId}/scanConfigs/{scanConfigId}'. - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "websecurityscanner.googleapis.com/ScanConfig" + } + ]; } // Request for the `GetScanConfig` method. 
message GetScanConfigRequest { - // Required. - // The resource name of the ScanConfig to be returned. The name follows the + // Required. The resource name of the ScanConfig to be returned. The name follows the // format of 'projects/{projectId}/scanConfigs/{scanConfigId}'. - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "websecurityscanner.googleapis.com/ScanConfig" + } + ]; } // Request for the `ListScanConfigs` method. message ListScanConfigsRequest { - // Required. - // The parent resource name, which should be a project resource name in the + // Required. The parent resource name, which should be a project resource name in the // format 'projects/{projectId}'. - string parent = 1; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "cloudresourcemanager.googleapis.com/Project" + } + ]; // A token identifying a page of results to be returned. This should be a // `next_page_token` value returned from a previous List request. @@ -181,17 +215,15 @@ message ListScanConfigsRequest { // Request for the `UpdateScanConfigRequest` method. message UpdateScanConfigRequest { - // Required. - // The ScanConfig to be updated. The name field must be set to identify the + // Required. The ScanConfig to be updated. The name field must be set to identify the // resource to be updated. The values of fields not covered by the mask // will be ignored. - ScanConfig scan_config = 2; + ScanConfig scan_config = 2 [(google.api.field_behavior) = REQUIRED]; - // Required. - // The update mask applies to the resource. For the `FieldMask` definition, + // Required. The update mask applies to the resource. 
For the `FieldMask` definition, // see // https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask - google.protobuf.FieldMask update_mask = 3; + google.protobuf.FieldMask update_mask = 3 [(google.api.field_behavior) = REQUIRED]; } // Response for the `ListScanConfigs` method. @@ -206,27 +238,39 @@ message ListScanConfigsResponse { // Request for the `StartScanRun` method. message StartScanRunRequest { - // Required. - // The resource name of the ScanConfig to be used. The name follows the + // Required. The resource name of the ScanConfig to be used. The name follows the // format of 'projects/{projectId}/scanConfigs/{scanConfigId}'. - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "websecurityscanner.googleapis.com/ScanConfig" + } + ]; } // Request for the `GetScanRun` method. message GetScanRunRequest { - // Required. - // The resource name of the ScanRun to be returned. The name follows the + // Required. The resource name of the ScanRun to be returned. The name follows the // format of // 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}'. - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "websecurityscanner.googleapis.com/ScanRun" + } + ]; } // Request for the `ListScanRuns` method. message ListScanRunsRequest { - // Required. - // The parent resource name, which should be a scan resource name in the + // Required. The parent resource name, which should be a scan resource name in the // format 'projects/{projectId}/scanConfigs/{scanConfigId}'. - string parent = 1; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "websecurityscanner.googleapis.com/ScanConfig" + } + ]; // A token identifying a page of results to be returned. 
This should be a // `next_page_token` value returned from a previous List request. @@ -251,20 +295,28 @@ message ListScanRunsResponse { // Request for the `StopScanRun` method. message StopScanRunRequest { - // Required. - // The resource name of the ScanRun to be stopped. The name follows the + // Required. The resource name of the ScanRun to be stopped. The name follows the // format of // 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}'. - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "websecurityscanner.googleapis.com/ScanRun" + } + ]; } // Request for the `ListCrawledUrls` method. message ListCrawledUrlsRequest { - // Required. - // The parent resource name, which should be a scan run resource name in the + // Required. The parent resource name, which should be a scan run resource name in the // format // 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}'. - string parent = 1; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "websecurityscanner.googleapis.com/ScanRun" + } + ]; // A token identifying a page of results to be returned. This should be a // `next_page_token` value returned from a previous List request. @@ -289,26 +341,34 @@ message ListCrawledUrlsResponse { // Request for the `GetFinding` method. message GetFindingRequest { - // Required. - // The resource name of the Finding to be returned. The name follows the + // Required. The resource name of the Finding to be returned. The name follows the // format of // 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}/findings/{findingId}'. - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "websecurityscanner.googleapis.com/Finding" + } + ]; } // Request for the `ListFindings` method. message ListFindingsRequest { - // Required. 
- // The parent resource name, which should be a scan run resource name in the + // Required. The parent resource name, which should be a scan run resource name in the // format // 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}'. - string parent = 1; - - // The filter expression. The expression must be in the format: + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "websecurityscanner.googleapis.com/ScanRun" + } + ]; + + // Required. The filter expression. The expression must be in the format: // . // Supported field: 'finding_type'. // Supported operator: '='. - string filter = 2; + string filter = 2 [(google.api.field_behavior) = REQUIRED]; // A token identifying a page of results to be returned. This should be a // `next_page_token` value returned from a previous List request. @@ -333,11 +393,15 @@ message ListFindingsResponse { // Request for the `ListFindingTypeStats` method. message ListFindingTypeStatsRequest { - // Required. - // The parent resource name, which should be a scan run resource name in the + // Required. The parent resource name, which should be a scan run resource name in the // format // 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}'. - string parent = 1; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "websecurityscanner.googleapis.com/ScanRun" + } + ]; } // Response for the `ListFindingTypeStats` method. 
diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/web_security_scanner_pb2.py b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/web_security_scanner_pb2.py index d1a34a30f035..ad3f1d89f4b2 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/web_security_scanner_pb2.py +++ b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/web_security_scanner_pb2.py @@ -16,6 +16,9 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.cloud.websecurityscanner_v1beta.proto import ( crawled_url_pb2 as google_dot_cloud_dot_websecurityscanner__v1beta_dot_proto_dot_crawled__url__pb2, ) @@ -43,10 +46,13 @@ "\n*com.google.cloud.websecurityscanner.v1betaB\027WebSecurityScannerProtoP\001ZXgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta;websecurityscanner\312\002&Google\\Cloud\\WebSecurityScanner\\V1beta" ), serialized_pb=_b( - '\nGgoogle/cloud/websecurityscanner_v1beta/proto/web_security_scanner.proto\x12&google.cloud.websecurityscanner.v1beta\x1a\x1cgoogle/api/annotations.proto\x1a>google/cloud/websecurityscanner_v1beta/proto/crawled_url.proto\x1a:google/cloud/websecurityscanner_v1beta/proto/finding.proto\x1a\x45google/cloud/websecurityscanner_v1beta/proto/finding_type_stats.proto\x1a>google/cloud/websecurityscanner_v1beta/proto/scan_config.proto\x1a;google/cloud/websecurityscanner_v1beta/proto/scan_run.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto"r\n\x17\x43reateScanConfigRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12G\n\x0bscan_config\x18\x02 \x01(\x0b\x32\x32.google.cloud.websecurityscanner.v1beta.ScanConfig"\'\n\x17\x44\x65leteScanConfigRequest\x12\x0c\n\x04name\x18\x01 
\x01(\t"$\n\x14GetScanConfigRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"O\n\x16ListScanConfigsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"\x93\x01\n\x17UpdateScanConfigRequest\x12G\n\x0bscan_config\x18\x02 \x01(\x0b\x32\x32.google.cloud.websecurityscanner.v1beta.ScanConfig\x12/\n\x0bupdate_mask\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"|\n\x17ListScanConfigsResponse\x12H\n\x0cscan_configs\x18\x01 \x03(\x0b\x32\x32.google.cloud.websecurityscanner.v1beta.ScanConfig\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"#\n\x13StartScanRunRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"!\n\x11GetScanRunRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"L\n\x13ListScanRunsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"s\n\x14ListScanRunsResponse\x12\x42\n\tscan_runs\x18\x01 \x03(\x0b\x32/.google.cloud.websecurityscanner.v1beta.ScanRun\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t""\n\x12StopScanRunRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"O\n\x16ListCrawledUrlsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"|\n\x17ListCrawledUrlsResponse\x12H\n\x0c\x63rawled_urls\x18\x01 \x03(\x0b\x32\x32.google.cloud.websecurityscanner.v1beta.CrawledUrl\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"!\n\x11GetFindingRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"\\\n\x13ListFindingsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x12\n\npage_token\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05"r\n\x14ListFindingsResponse\x12\x41\n\x08\x66indings\x18\x01 \x03(\x0b\x32/.google.cloud.websecurityscanner.v1beta.Finding\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"-\n\x1bListFindingTypeStatsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t"t\n\x1cListFindingTypeStatsResponse\x12T\n\x12\x66inding_type_stats\x18\x01 
\x03(\x0b\x32\x38.google.cloud.websecurityscanner.v1beta.FindingTypeStats2\xaa\x14\n\x12WebSecurityScanner\x12\xc5\x01\n\x10\x43reateScanConfig\x12?.google.cloud.websecurityscanner.v1beta.CreateScanConfigRequest\x1a\x32.google.cloud.websecurityscanner.v1beta.ScanConfig"<\x82\xd3\xe4\x93\x02\x36"\'/v1beta/{parent=projects/*}/scanConfigs:\x0bscan_config\x12\x9c\x01\n\x10\x44\x65leteScanConfig\x12?.google.cloud.websecurityscanner.v1beta.DeleteScanConfigRequest\x1a\x16.google.protobuf.Empty"/\x82\xd3\xe4\x93\x02)*\'/v1beta/{name=projects/*/scanConfigs/*}\x12\xb2\x01\n\rGetScanConfig\x12<.google.cloud.websecurityscanner.v1beta.GetScanConfigRequest\x1a\x32.google.cloud.websecurityscanner.v1beta.ScanConfig"/\x82\xd3\xe4\x93\x02)\x12\'/v1beta/{name=projects/*/scanConfigs/*}\x12\xc3\x01\n\x0fListScanConfigs\x12>.google.cloud.websecurityscanner.v1beta.ListScanConfigsRequest\x1a?.google.cloud.websecurityscanner.v1beta.ListScanConfigsResponse"/\x82\xd3\xe4\x93\x02)\x12\'/v1beta/{parent=projects/*}/scanConfigs\x12\xd1\x01\n\x10UpdateScanConfig\x12?.google.cloud.websecurityscanner.v1beta.UpdateScanConfigRequest\x1a\x32.google.cloud.websecurityscanner.v1beta.ScanConfig"H\x82\xd3\xe4\x93\x02\x42\x32\x33/v1beta/{scan_config.name=projects/*/scanConfigs/*}:\x0bscan_config\x12\xb6\x01\n\x0cStartScanRun\x12;.google.cloud.websecurityscanner.v1beta.StartScanRunRequest\x1a/.google.cloud.websecurityscanner.v1beta.ScanRun"8\x82\xd3\xe4\x93\x02\x32"-/v1beta/{name=projects/*/scanConfigs/*}:start:\x01*\x12\xb4\x01\n\nGetScanRun\x12\x39.google.cloud.websecurityscanner.v1beta.GetScanRunRequest\x1a/.google.cloud.websecurityscanner.v1beta.ScanRun":\x82\xd3\xe4\x93\x02\x34\x12\x32/v1beta/{name=projects/*/scanConfigs/*/scanRuns/*}\x12\xc5\x01\n\x0cListScanRuns\x12;.google.cloud.websecurityscanner.v1beta.ListScanRunsRequest\x1a<.google.cloud.websecurityscanner.v1beta.ListScanRunsResponse":\x82\xd3\xe4\x93\x02\x34\x12\x32/v1beta/{parent=projects/*/scanConfigs/*}/scanRuns\x12\xbe\x01\n\x0bStopScanRun\x1
2:.google.cloud.websecurityscanner.v1beta.StopScanRunRequest\x1a/.google.cloud.websecurityscanner.v1beta.ScanRun"B\x82\xd3\xe4\x93\x02<"7/v1beta/{name=projects/*/scanConfigs/*/scanRuns/*}:stop:\x01*\x12\xdc\x01\n\x0fListCrawledUrls\x12>.google.cloud.websecurityscanner.v1beta.ListCrawledUrlsRequest\x1a?.google.cloud.websecurityscanner.v1beta.ListCrawledUrlsResponse"H\x82\xd3\xe4\x93\x02\x42\x12@/v1beta/{parent=projects/*/scanConfigs/*/scanRuns/*}/crawledUrls\x12\xbf\x01\n\nGetFinding\x12\x39.google.cloud.websecurityscanner.v1beta.GetFindingRequest\x1a/.google.cloud.websecurityscanner.v1beta.Finding"E\x82\xd3\xe4\x93\x02?\x12=/v1beta/{name=projects/*/scanConfigs/*/scanRuns/*/findings/*}\x12\xd0\x01\n\x0cListFindings\x12;.google.cloud.websecurityscanner.v1beta.ListFindingsRequest\x1a<.google.cloud.websecurityscanner.v1beta.ListFindingsResponse"E\x82\xd3\xe4\x93\x02?\x12=/v1beta/{parent=projects/*/scanConfigs/*/scanRuns/*}/findings\x12\xf0\x01\n\x14ListFindingTypeStats\x12\x43.google.cloud.websecurityscanner.v1beta.ListFindingTypeStatsRequest\x1a\x44.google.cloud.websecurityscanner.v1beta.ListFindingTypeStatsResponse"M\x82\xd3\xe4\x93\x02G\x12\x45/v1beta/{parent=projects/*/scanConfigs/*/scanRuns/*}/findingTypeStatsB\xca\x01\n*com.google.cloud.websecurityscanner.v1betaB\x17WebSecurityScannerProtoP\x01ZXgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta;websecurityscanner\xca\x02&Google\\Cloud\\WebSecurityScanner\\V1betab\x06proto3' + 
'\nGgoogle/cloud/websecurityscanner_v1beta/proto/web_security_scanner.proto\x12&google.cloud.websecurityscanner.v1beta\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a>google/cloud/websecurityscanner_v1beta/proto/crawled_url.proto\x1a:google/cloud/websecurityscanner_v1beta/proto/finding.proto\x1a\x45google/cloud/websecurityscanner_v1beta/proto/finding_type_stats.proto\x1a>google/cloud/websecurityscanner_v1beta/proto/scan_config.proto\x1a;google/cloud/websecurityscanner_v1beta/proto/scan_run.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto"\xac\x01\n\x17\x43reateScanConfigRequest\x12\x43\n\x06parent\x18\x01 \x01(\tB3\xe0\x41\x02\xfa\x41-\n+cloudresourcemanager.googleapis.com/Project\x12L\n\x0bscan_config\x18\x02 \x01(\x0b\x32\x32.google.cloud.websecurityscanner.v1beta.ScanConfigB\x03\xe0\x41\x02"]\n\x17\x44\x65leteScanConfigRequest\x12\x42\n\x04name\x18\x01 \x01(\tB4\xe0\x41\x02\xfa\x41.\n,websecurityscanner.googleapis.com/ScanConfig"Z\n\x14GetScanConfigRequest\x12\x42\n\x04name\x18\x01 \x01(\tB4\xe0\x41\x02\xfa\x41.\n,websecurityscanner.googleapis.com/ScanConfig"\x84\x01\n\x16ListScanConfigsRequest\x12\x43\n\x06parent\x18\x01 \x01(\tB3\xe0\x41\x02\xfa\x41-\n+cloudresourcemanager.googleapis.com/Project\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"\x9d\x01\n\x17UpdateScanConfigRequest\x12L\n\x0bscan_config\x18\x02 \x01(\x0b\x32\x32.google.cloud.websecurityscanner.v1beta.ScanConfigB\x03\xe0\x41\x02\x12\x34\n\x0bupdate_mask\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02"|\n\x17ListScanConfigsResponse\x12H\n\x0cscan_configs\x18\x01 \x03(\x0b\x32\x32.google.cloud.websecurityscanner.v1beta.ScanConfig\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"Y\n\x13StartScanRunRequest\x12\x42\n\x04name\x18\x01 
\x01(\tB4\xe0\x41\x02\xfa\x41.\n,websecurityscanner.googleapis.com/ScanConfig"T\n\x11GetScanRunRequest\x12?\n\x04name\x18\x01 \x01(\tB1\xe0\x41\x02\xfa\x41+\n)websecurityscanner.googleapis.com/ScanRun"\x82\x01\n\x13ListScanRunsRequest\x12\x44\n\x06parent\x18\x01 \x01(\tB4\xe0\x41\x02\xfa\x41.\n,websecurityscanner.googleapis.com/ScanConfig\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"s\n\x14ListScanRunsResponse\x12\x42\n\tscan_runs\x18\x01 \x03(\x0b\x32/.google.cloud.websecurityscanner.v1beta.ScanRun\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"U\n\x12StopScanRunRequest\x12?\n\x04name\x18\x01 \x01(\tB1\xe0\x41\x02\xfa\x41+\n)websecurityscanner.googleapis.com/ScanRun"\x82\x01\n\x16ListCrawledUrlsRequest\x12\x41\n\x06parent\x18\x01 \x01(\tB1\xe0\x41\x02\xfa\x41+\n)websecurityscanner.googleapis.com/ScanRun\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"|\n\x17ListCrawledUrlsResponse\x12H\n\x0c\x63rawled_urls\x18\x01 \x03(\x0b\x32\x32.google.cloud.websecurityscanner.v1beta.CrawledUrl\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"T\n\x11GetFindingRequest\x12?\n\x04name\x18\x01 \x01(\tB1\xe0\x41\x02\xfa\x41+\n)websecurityscanner.googleapis.com/Finding"\x94\x01\n\x13ListFindingsRequest\x12\x41\n\x06parent\x18\x01 \x01(\tB1\xe0\x41\x02\xfa\x41+\n)websecurityscanner.googleapis.com/ScanRun\x12\x13\n\x06\x66ilter\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x12\n\npage_token\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05"r\n\x14ListFindingsResponse\x12\x41\n\x08\x66indings\x18\x01 \x03(\x0b\x32/.google.cloud.websecurityscanner.v1beta.Finding\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"`\n\x1bListFindingTypeStatsRequest\x12\x41\n\x06parent\x18\x01 \x01(\tB1\xe0\x41\x02\xfa\x41+\n)websecurityscanner.googleapis.com/ScanRun"t\n\x1cListFindingTypeStatsResponse\x12T\n\x12\x66inding_type_stats\x18\x01 
\x03(\x0b\x32\x38.google.cloud.websecurityscanner.v1beta.FindingTypeStats2\x8e\x16\n\x12WebSecurityScanner\x12\xda\x01\n\x10\x43reateScanConfig\x12?.google.cloud.websecurityscanner.v1beta.CreateScanConfigRequest\x1a\x32.google.cloud.websecurityscanner.v1beta.ScanConfig"Q\x82\xd3\xe4\x93\x02\x36"\'/v1beta/{parent=projects/*}/scanConfigs:\x0bscan_config\xda\x41\x12parent,scan_config\x12\xa3\x01\n\x10\x44\x65leteScanConfig\x12?.google.cloud.websecurityscanner.v1beta.DeleteScanConfigRequest\x1a\x16.google.protobuf.Empty"6\x82\xd3\xe4\x93\x02)*\'/v1beta/{name=projects/*/scanConfigs/*}\xda\x41\x04name\x12\xb9\x01\n\rGetScanConfig\x12<.google.cloud.websecurityscanner.v1beta.GetScanConfigRequest\x1a\x32.google.cloud.websecurityscanner.v1beta.ScanConfig"6\x82\xd3\xe4\x93\x02)\x12\'/v1beta/{name=projects/*/scanConfigs/*}\xda\x41\x04name\x12\xcc\x01\n\x0fListScanConfigs\x12>.google.cloud.websecurityscanner.v1beta.ListScanConfigsRequest\x1a?.google.cloud.websecurityscanner.v1beta.ListScanConfigsResponse"8\x82\xd3\xe4\x93\x02)\x12\'/v1beta/{parent=projects/*}/scanConfigs\xda\x41\x06parent\x12\xeb\x01\n\x10UpdateScanConfig\x12?.google.cloud.websecurityscanner.v1beta.UpdateScanConfigRequest\x1a\x32.google.cloud.websecurityscanner.v1beta.ScanConfig"b\x82\xd3\xe4\x93\x02\x42\x32\x33/v1beta/{scan_config.name=projects/*/scanConfigs/*}:\x0bscan_config\xda\x41\x17scan_config,update_mask\x12\xbd\x01\n\x0cStartScanRun\x12;.google.cloud.websecurityscanner.v1beta.StartScanRunRequest\x1a/.google.cloud.websecurityscanner.v1beta.ScanRun"?\x82\xd3\xe4\x93\x02\x32"-/v1beta/{name=projects/*/scanConfigs/*}:start:\x01*\xda\x41\x04name\x12\xbb\x01\n\nGetScanRun\x12\x39.google.cloud.websecurityscanner.v1beta.GetScanRunRequest\x1a/.google.cloud.websecurityscanner.v1beta.ScanRun"A\x82\xd3\xe4\x93\x02\x34\x12\x32/v1beta/{name=projects/*/scanConfigs/*/scanRuns/*}\xda\x41\x04name\x12\xce\x01\n\x0cListScanRuns\x12;.google.cloud.websecurityscanner.v1beta.ListScanRunsRequest\x1a<.google.cloud.websecurityscan
ner.v1beta.ListScanRunsResponse"C\x82\xd3\xe4\x93\x02\x34\x12\x32/v1beta/{parent=projects/*/scanConfigs/*}/scanRuns\xda\x41\x06parent\x12\xc5\x01\n\x0bStopScanRun\x12:.google.cloud.websecurityscanner.v1beta.StopScanRunRequest\x1a/.google.cloud.websecurityscanner.v1beta.ScanRun"I\x82\xd3\xe4\x93\x02<"7/v1beta/{name=projects/*/scanConfigs/*/scanRuns/*}:stop:\x01*\xda\x41\x04name\x12\xe5\x01\n\x0fListCrawledUrls\x12>.google.cloud.websecurityscanner.v1beta.ListCrawledUrlsRequest\x1a?.google.cloud.websecurityscanner.v1beta.ListCrawledUrlsResponse"Q\x82\xd3\xe4\x93\x02\x42\x12@/v1beta/{parent=projects/*/scanConfigs/*/scanRuns/*}/crawledUrls\xda\x41\x06parent\x12\xc6\x01\n\nGetFinding\x12\x39.google.cloud.websecurityscanner.v1beta.GetFindingRequest\x1a/.google.cloud.websecurityscanner.v1beta.Finding"L\x82\xd3\xe4\x93\x02?\x12=/v1beta/{name=projects/*/scanConfigs/*/scanRuns/*/findings/*}\xda\x41\x04name\x12\xe0\x01\n\x0cListFindings\x12;.google.cloud.websecurityscanner.v1beta.ListFindingsRequest\x1a<.google.cloud.websecurityscanner.v1beta.ListFindingsResponse"U\x82\xd3\xe4\x93\x02?\x12=/v1beta/{parent=projects/*/scanConfigs/*/scanRuns/*}/findings\xda\x41\rparent,filter\x12\xf9\x01\n\x14ListFindingTypeStats\x12\x43.google.cloud.websecurityscanner.v1beta.ListFindingTypeStatsRequest\x1a\x44.google.cloud.websecurityscanner.v1beta.ListFindingTypeStatsResponse"V\x82\xd3\xe4\x93\x02G\x12\x45/v1beta/{parent=projects/*/scanConfigs/*/scanRuns/*}/findingTypeStats\xda\x41\x06parent\x1aU\xca\x41!websecurityscanner.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB\xca\x01\n*com.google.cloud.websecurityscanner.v1betaB\x17WebSecurityScannerProtoP\x01ZXgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta;websecurityscanner\xca\x02&Google\\Cloud\\WebSecurityScanner\\V1betab\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, + 
google_dot_api_dot_resource__pb2.DESCRIPTOR, google_dot_cloud_dot_websecurityscanner__v1beta_dot_proto_dot_crawled__url__pb2.DESCRIPTOR, google_dot_cloud_dot_websecurityscanner__v1beta_dot_proto_dot_finding__pb2.DESCRIPTOR, google_dot_cloud_dot_websecurityscanner__v1beta_dot_proto_dot_finding__type__stats__pb2.DESCRIPTOR, @@ -80,7 +86,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A-\n+cloudresourcemanager.googleapis.com/Project" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -98,7 +106,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -110,8 +118,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=528, - serialized_end=642, + serialized_start=614, + serialized_end=786, ) @@ -137,7 +145,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A.\n,websecurityscanner.googleapis.com/ScanConfig" + ), file=DESCRIPTOR, ) ], @@ -149,8 +159,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=644, - serialized_end=683, + serialized_start=788, + serialized_end=881, ) @@ -176,7 +186,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A.\n,websecurityscanner.googleapis.com/ScanConfig" + ), file=DESCRIPTOR, ) ], @@ -188,8 +200,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=685, - serialized_end=721, + serialized_start=883, + serialized_end=973, ) @@ -215,7 +227,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A-\n+cloudresourcemanager.googleapis.com/Project" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -263,8 +277,8 @@ syntax="proto3", 
extension_ranges=[], oneofs=[], - serialized_start=723, - serialized_end=802, + serialized_start=976, + serialized_end=1108, ) @@ -290,7 +304,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -308,7 +322,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -320,8 +334,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=805, - serialized_end=952, + serialized_start=1111, + serialized_end=1268, ) @@ -377,8 +391,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=954, - serialized_end=1078, + serialized_start=1270, + serialized_end=1394, ) @@ -404,7 +418,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A.\n,websecurityscanner.googleapis.com/ScanConfig" + ), file=DESCRIPTOR, ) ], @@ -416,8 +432,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1080, - serialized_end=1115, + serialized_start=1396, + serialized_end=1485, ) @@ -443,7 +459,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A+\n)websecurityscanner.googleapis.com/ScanRun" + ), file=DESCRIPTOR, ) ], @@ -455,8 +473,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1117, - serialized_end=1150, + serialized_start=1487, + serialized_end=1571, ) @@ -482,7 +500,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A.\n,websecurityscanner.googleapis.com/ScanConfig" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -530,8 +550,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1152, - serialized_end=1228, + 
serialized_start=1574, + serialized_end=1704, ) @@ -587,8 +607,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1230, - serialized_end=1345, + serialized_start=1706, + serialized_end=1821, ) @@ -614,7 +634,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A+\n)websecurityscanner.googleapis.com/ScanRun" + ), file=DESCRIPTOR, ) ], @@ -626,8 +648,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1347, - serialized_end=1381, + serialized_start=1823, + serialized_end=1908, ) @@ -653,7 +675,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A+\n)websecurityscanner.googleapis.com/ScanRun" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -701,8 +725,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1383, - serialized_end=1462, + serialized_start=1911, + serialized_end=2041, ) @@ -758,8 +782,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1464, - serialized_end=1588, + serialized_start=2043, + serialized_end=2167, ) @@ -785,7 +809,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A+\n)websecurityscanner.googleapis.com/Finding" + ), file=DESCRIPTOR, ) ], @@ -797,8 +823,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1590, - serialized_end=1623, + serialized_start=2169, + serialized_end=2253, ) @@ -824,7 +850,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A+\n)websecurityscanner.googleapis.com/ScanRun" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -842,7 +870,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), 
file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -890,8 +918,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1625, - serialized_end=1717, + serialized_start=2256, + serialized_end=2404, ) @@ -947,8 +975,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1719, - serialized_end=1833, + serialized_start=2406, + serialized_end=2520, ) @@ -974,7 +1002,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A+\n)websecurityscanner.googleapis.com/ScanRun" + ), file=DESCRIPTOR, ) ], @@ -986,8 +1016,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1835, - serialized_end=1880, + serialized_start=2522, + serialized_end=2618, ) @@ -1025,8 +1055,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1882, - serialized_end=1998, + serialized_start=2620, + serialized_end=2736, ) _CREATESCANCONFIGREQUEST.fields_by_name[ @@ -1422,8 +1452,9 @@ resource name in the format 'projects/{projectId}/scanConfigs/ {scanConfigId}/scanRuns/{scanRunId}'. filter: - The filter expression. The expression must be in the format: . - Supported field: 'finding\_type'. Supported operator: '='. + Required. The filter expression. The expression must be in the + format: . Supported field: 'finding\_type'. Supported + operator: '='. page_token: A token identifying a page of results to be returned. 
This should be a ``next_page_token`` value returned from a previous @@ -1500,15 +1531,33 @@ DESCRIPTOR._options = None +_CREATESCANCONFIGREQUEST.fields_by_name["parent"]._options = None +_CREATESCANCONFIGREQUEST.fields_by_name["scan_config"]._options = None +_DELETESCANCONFIGREQUEST.fields_by_name["name"]._options = None +_GETSCANCONFIGREQUEST.fields_by_name["name"]._options = None +_LISTSCANCONFIGSREQUEST.fields_by_name["parent"]._options = None +_UPDATESCANCONFIGREQUEST.fields_by_name["scan_config"]._options = None +_UPDATESCANCONFIGREQUEST.fields_by_name["update_mask"]._options = None +_STARTSCANRUNREQUEST.fields_by_name["name"]._options = None +_GETSCANRUNREQUEST.fields_by_name["name"]._options = None +_LISTSCANRUNSREQUEST.fields_by_name["parent"]._options = None +_STOPSCANRUNREQUEST.fields_by_name["name"]._options = None +_LISTCRAWLEDURLSREQUEST.fields_by_name["parent"]._options = None +_GETFINDINGREQUEST.fields_by_name["name"]._options = None +_LISTFINDINGSREQUEST.fields_by_name["parent"]._options = None +_LISTFINDINGSREQUEST.fields_by_name["filter"]._options = None +_LISTFINDINGTYPESTATSREQUEST.fields_by_name["parent"]._options = None _WEBSECURITYSCANNER = _descriptor.ServiceDescriptor( name="WebSecurityScanner", full_name="google.cloud.websecurityscanner.v1beta.WebSecurityScanner", file=DESCRIPTOR, index=0, - serialized_options=None, - serialized_start=2001, - serialized_end=4603, + serialized_options=_b( + "\312A!websecurityscanner.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" + ), + serialized_start=2739, + serialized_end=5569, methods=[ _descriptor.MethodDescriptor( name="CreateScanConfig", @@ -1518,7 +1567,7 @@ input_type=_CREATESCANCONFIGREQUEST, output_type=google_dot_cloud_dot_websecurityscanner__v1beta_dot_proto_dot_scan__config__pb2._SCANCONFIG, serialized_options=_b( - "\202\323\344\223\0026\"'/v1beta/{parent=projects/*}/scanConfigs:\013scan_config" + 
"\202\323\344\223\0026\"'/v1beta/{parent=projects/*}/scanConfigs:\013scan_config\332A\022parent,scan_config" ), ), _descriptor.MethodDescriptor( @@ -1529,7 +1578,7 @@ input_type=_DELETESCANCONFIGREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, serialized_options=_b( - "\202\323\344\223\002)*'/v1beta/{name=projects/*/scanConfigs/*}" + "\202\323\344\223\002)*'/v1beta/{name=projects/*/scanConfigs/*}\332A\004name" ), ), _descriptor.MethodDescriptor( @@ -1540,7 +1589,7 @@ input_type=_GETSCANCONFIGREQUEST, output_type=google_dot_cloud_dot_websecurityscanner__v1beta_dot_proto_dot_scan__config__pb2._SCANCONFIG, serialized_options=_b( - "\202\323\344\223\002)\022'/v1beta/{name=projects/*/scanConfigs/*}" + "\202\323\344\223\002)\022'/v1beta/{name=projects/*/scanConfigs/*}\332A\004name" ), ), _descriptor.MethodDescriptor( @@ -1551,7 +1600,7 @@ input_type=_LISTSCANCONFIGSREQUEST, output_type=_LISTSCANCONFIGSRESPONSE, serialized_options=_b( - "\202\323\344\223\002)\022'/v1beta/{parent=projects/*}/scanConfigs" + "\202\323\344\223\002)\022'/v1beta/{parent=projects/*}/scanConfigs\332A\006parent" ), ), _descriptor.MethodDescriptor( @@ -1562,7 +1611,7 @@ input_type=_UPDATESCANCONFIGREQUEST, output_type=google_dot_cloud_dot_websecurityscanner__v1beta_dot_proto_dot_scan__config__pb2._SCANCONFIG, serialized_options=_b( - "\202\323\344\223\002B23/v1beta/{scan_config.name=projects/*/scanConfigs/*}:\013scan_config" + "\202\323\344\223\002B23/v1beta/{scan_config.name=projects/*/scanConfigs/*}:\013scan_config\332A\027scan_config,update_mask" ), ), _descriptor.MethodDescriptor( @@ -1573,7 +1622,7 @@ input_type=_STARTSCANRUNREQUEST, output_type=google_dot_cloud_dot_websecurityscanner__v1beta_dot_proto_dot_scan__run__pb2._SCANRUN, serialized_options=_b( - '\202\323\344\223\0022"-/v1beta/{name=projects/*/scanConfigs/*}:start:\001*' + '\202\323\344\223\0022"-/v1beta/{name=projects/*/scanConfigs/*}:start:\001*\332A\004name' ), ), _descriptor.MethodDescriptor( @@ -1584,7 +1633,7 @@ 
input_type=_GETSCANRUNREQUEST, output_type=google_dot_cloud_dot_websecurityscanner__v1beta_dot_proto_dot_scan__run__pb2._SCANRUN, serialized_options=_b( - "\202\323\344\223\0024\0222/v1beta/{name=projects/*/scanConfigs/*/scanRuns/*}" + "\202\323\344\223\0024\0222/v1beta/{name=projects/*/scanConfigs/*/scanRuns/*}\332A\004name" ), ), _descriptor.MethodDescriptor( @@ -1595,7 +1644,7 @@ input_type=_LISTSCANRUNSREQUEST, output_type=_LISTSCANRUNSRESPONSE, serialized_options=_b( - "\202\323\344\223\0024\0222/v1beta/{parent=projects/*/scanConfigs/*}/scanRuns" + "\202\323\344\223\0024\0222/v1beta/{parent=projects/*/scanConfigs/*}/scanRuns\332A\006parent" ), ), _descriptor.MethodDescriptor( @@ -1606,7 +1655,7 @@ input_type=_STOPSCANRUNREQUEST, output_type=google_dot_cloud_dot_websecurityscanner__v1beta_dot_proto_dot_scan__run__pb2._SCANRUN, serialized_options=_b( - '\202\323\344\223\002<"7/v1beta/{name=projects/*/scanConfigs/*/scanRuns/*}:stop:\001*' + '\202\323\344\223\002<"7/v1beta/{name=projects/*/scanConfigs/*/scanRuns/*}:stop:\001*\332A\004name' ), ), _descriptor.MethodDescriptor( @@ -1617,7 +1666,7 @@ input_type=_LISTCRAWLEDURLSREQUEST, output_type=_LISTCRAWLEDURLSRESPONSE, serialized_options=_b( - "\202\323\344\223\002B\022@/v1beta/{parent=projects/*/scanConfigs/*/scanRuns/*}/crawledUrls" + "\202\323\344\223\002B\022@/v1beta/{parent=projects/*/scanConfigs/*/scanRuns/*}/crawledUrls\332A\006parent" ), ), _descriptor.MethodDescriptor( @@ -1628,7 +1677,7 @@ input_type=_GETFINDINGREQUEST, output_type=google_dot_cloud_dot_websecurityscanner__v1beta_dot_proto_dot_finding__pb2._FINDING, serialized_options=_b( - "\202\323\344\223\002?\022=/v1beta/{name=projects/*/scanConfigs/*/scanRuns/*/findings/*}" + "\202\323\344\223\002?\022=/v1beta/{name=projects/*/scanConfigs/*/scanRuns/*/findings/*}\332A\004name" ), ), _descriptor.MethodDescriptor( @@ -1639,7 +1688,7 @@ input_type=_LISTFINDINGSREQUEST, output_type=_LISTFINDINGSRESPONSE, serialized_options=_b( - 
"\202\323\344\223\002?\022=/v1beta/{parent=projects/*/scanConfigs/*/scanRuns/*}/findings" + "\202\323\344\223\002?\022=/v1beta/{parent=projects/*/scanConfigs/*/scanRuns/*}/findings\332A\rparent,filter" ), ), _descriptor.MethodDescriptor( @@ -1650,7 +1699,7 @@ input_type=_LISTFINDINGTYPESTATSREQUEST, output_type=_LISTFINDINGTYPESTATSRESPONSE, serialized_options=_b( - "\202\323\344\223\002G\022E/v1beta/{parent=projects/*/scanConfigs/*/scanRuns/*}/findingTypeStats" + "\202\323\344\223\002G\022E/v1beta/{parent=projects/*/scanConfigs/*/scanRuns/*}/findingTypeStats\332A\006parent" ), ), ], diff --git a/websecurityscanner/synth.metadata b/websecurityscanner/synth.metadata index b39c7e874de8..6ce821633e5b 100644 --- a/websecurityscanner/synth.metadata +++ b/websecurityscanner/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-08-06T12:52:30.431818Z", + "updateTime": "2019-10-10T12:50:02.032093Z", "sources": [ { "generator": { "name": "artman", - "version": "0.32.1", - "dockerImage": "googleapis/artman@sha256:a684d40ba9a4e15946f5f2ca6b4bd9fe301192f522e9de4fff622118775f309b" + "version": "0.38.0", + "dockerImage": "googleapis/artman@sha256:0d2f8d429110aeb8d82df6550ef4ede59d40df9062d260a1580fce688b0512bf" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "e699b0cba64ffddfae39633417180f1f65875896", - "internalRef": "261759677" + "sha": "10f91fa12f70e8e0209a45fc10807ed1f77c7e4e", + "internalRef": "273826591" } }, {